Import Cobalt 21.master.0.301702
diff --git a/src/build/android/AndroidManifest.xml b/src/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..3c4ed29
--- /dev/null
+++ b/src/build/android/AndroidManifest.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+  source code is governed by a BSD-style license that can be found in the
+  LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by:
+  1. aapt when generating R.java in java.gypi:
+     Nothing in the manifest is used, but it is still required by aapt.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.dummy"
+    android:versionCode="1"
+    android:versionName="1.0">
+
+</manifest>
diff --git a/src/build/android/BUILD.gn b/src/build/android/BUILD.gn
new file mode 100644
index 0000000..1be9f47
--- /dev/null
+++ b/src/build/android/BUILD.gn
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/build_vars.gni")
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+import("//build/config/python.gni")
+import("//build_overrides/build.gni")
+
+if (enable_java_templates) {
+  # Create or update the API versions cache if necessary by running a
+  # functionally empty lint task. This prevents racy creation of the
+  # cache while linting java targets in android_lint.
+  android_lint("prepare_android_lint_cache") {
+    create_cache = true
+  }
+
+  if (enable_jdk_library_desugaring) {
+    dex_jdk_libs("all_jdk_libs") {
+      output = "$target_out_dir/$target_name.l8.dex"
+      min_sdk_version = default_min_sdk_version
+    }
+  }
+
+  generate_build_config_srcjar("build_config_gen") {
+    use_final_fields = false
+  }
+
+  java_library("build_config_java") {
+    supports_android = true
+    srcjar_deps = [ ":build_config_gen" ]
+    jar_excluded_patterns = [ "*/build/BuildConfig.class" ]
+  }
+
+  write_native_libraries_java("native_libraries_gen") {
+    use_final_fields = false
+  }
+
+  android_library("native_libraries_java") {
+    srcjar_deps = [ ":native_libraries_gen" ]
+
+    # New version of NativeLibraries.java (with the actual correct values) will
+    # be created when creating an apk.
+    jar_excluded_patterns = [ "*/NativeLibraries.class" ]
+  }
+}
+
+python_library("devil_chromium_py") {
+  pydeps_file = "devil_chromium.pydeps"
+  data = [
+    "devil_chromium.py",
+    "devil_chromium.json",
+    "//third_party/catapult/third_party/gsutil/",
+    "//third_party/catapult/devil/devil/devil_dependencies.json",
+
+    # Read by gn_helpers.BuildWithChromium()
+    "//build/config/gclient_args.gni",
+  ]
+}
+
+# Contains runtime deps for installing apks.
+# E.g. from test_runner.py or from apk_operations.py.
+group("apk_installer_data") {
+  # Other //build users let the devil library fetch these from Google Storage.
+  if (build_with_chromium) {
+    data_deps = [
+      "//build/android/pylib/device/commands",
+      "//tools/android/md5sum",
+    ]
+    data = [
+      "//third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar",
+    ]
+  }
+}
+
+python_library("apk_operations_py") {
+  pydeps_file = "apk_operations.pydeps"
+  deps = [ ":apk_installer_data" ]
+}
+
+python_library("test_runner_py") {
+  testonly = true
+  pydeps_file = "test_runner.pydeps"
+  data = [
+    "pylib/gtest/filter/",
+    "pylib/instrumentation/render_test.html.jinja",
+    "test_wrapper/logdog_wrapper.py",
+    "${android_sdk_build_tools}/aapt",
+    "${android_sdk_build_tools}/dexdump",
+    "${android_sdk_build_tools}/lib64/libc++.so",
+    "${android_sdk_build_tools}/split-select",
+    "${android_sdk_root}/platform-tools/adb",
+    "//third_party/requests/",
+  ]
+  data_deps = [
+    ":apk_installer_data",
+    ":devil_chromium_py",
+    ":logdog_wrapper_py",
+    ":stack_tools",
+  ]
+
+  # Other //build users let the devil library fetch these from Google Storage.
+  if (build_with_chromium) {
+    data_deps += [ "//tools/android/forwarder2" ]
+    data += [ "//tools/android/avd/proto/" ]
+    if (is_asan) {
+      data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
+    }
+  }
+
+  # Proguard is needed only when using apks (rather than native executables).
+  if (enable_java_templates) {
+    deps = [ "//build/android/stacktrace:java_deobfuscate" ]
+  }
+}
+
+python_library("logdog_wrapper_py") {
+  pydeps_file = "test_wrapper/logdog_wrapper.pydeps"
+}
+
+python_library("resource_sizes_py") {
+  pydeps_file = "resource_sizes.pydeps"
+  data_deps = [
+    ":devil_chromium_py",
+    "//third_party/catapult/tracing:convert_chart_json",
+  ]
+  data = [
+    build_vars_file,
+    android_readelf,
+  ]
+}
+
+# Tools necessary for symbolizing tombstones or stack traces that are output to
+# logcat.
+# Hidden behind build_with_chromium because some third party repos that use
+# //build don't pull in //third_party/android_platform.
+# TODO(crbug.com/1120190): Move stack script into //build/third_party
+#     and enable unconditionally.
+group("stack_tools") {
+  if (build_with_chromium) {
+    data = [
+      "tombstones.py",
+      "pylib/symbols/",
+      "stacktrace/",
+    ]
+
+    data_deps =
+        [ "//third_party/android_platform/development/scripts:stack_py" ]
+  }
+}
+
+# GN evaluates each .gn file once per toolchain, so restricting to default
+# toolchain will ensure write_file() is called only once.
+assert(current_toolchain == default_toolchain)
+
+# NOTE: If other platforms would benefit from exporting variables, we should
+# move this to a more top-level place.
+# It is currently here (instead of //BUILD.gn) to ensure that the file is
+# written even for non-chromium embedders of //build.
+_build_vars_json = {
+  # Underscore prefix so that it appears at the top.
+  _HEADER = "Generated during 'gn gen' by //build/android/BUILD.gn."
+  forward_variables_from(android_build_vars_json, "*")
+}
+
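+# For reference, build_vars_file will contain JSON of the form
+# (illustrative; the actual keys come from android_build_vars_json):
+#   { "_HEADER": "Generated during 'gn gen' ...", "<var>": "<value>", ... }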
+write_file(build_vars_file, _build_vars_json, "json")
diff --git a/src/build/android/CheckInstallApk-debug.apk b/src/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000..3dc3191
--- /dev/null
+++ b/src/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/src/build/android/DIR_METADATA b/src/build/android/DIR_METADATA
new file mode 100644
index 0000000..7a2580a
--- /dev/null
+++ b/src/build/android/DIR_METADATA
@@ -0,0 +1 @@
+os: ANDROID
diff --git a/src/build/android/adb_chrome_public_command_line b/src/build/android/adb_chrome_public_command_line
new file mode 100755
index 0000000..86ece8c
--- /dev/null
+++ b/src/build/android/adb_chrome_public_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current Chrome flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
+# flags. For example:
+#   adb_chrome_public_command_line --enable-webgl
+#
+# To remove all Chrome flags, pass an empty string for the flags:
+#   adb_chrome_public_command_line ""
+
+exec "$(dirname "$0")"/adb_command_line.py --name chrome-command-line "$@"
diff --git a/src/build/android/adb_command_line.py b/src/build/android/adb_command_line.py
new file mode 100755
index 0000000..c3ec8d4
--- /dev/null
+++ b/src/build/android/adb_command_line.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for reading / writing command-line flag files on device(s)."""
+
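+# Example invocations (illustrative):
+#   adb_command_line.py --name chrome-command-line                  # print
+#   adb_command_line.py --name chrome-command-line --enable-logging # write
+#   adb_command_line.py --name chrome-command-line ""               # delete
+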
+from __future__ import print_function
+
+import argparse
+import logging
+import sys
+
+import devil_chromium
+
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.tools import script_common
+from devil.utils import cmd_helper
+from devil.utils import logging_common
+
+
+def CheckBuildTypeSupportsFlags(device, command_line_flags_file):
+  is_webview = command_line_flags_file == 'webview-command-line'
+  if device.IsUserBuild() and is_webview:
+    raise device_errors.CommandFailedError(
+        'WebView only respects flags on a userdebug or eng device, yours '
+        'is a user build.', device)
+  elif device.IsUserBuild():
+    logging.warning(
+        'Your device (%s) is a user build; Chrome may or may not pick up '
+        'your commandline flags. Check your '
+        '"command_line_on_non_rooted_enabled" preference, or switch '
+        'devices.', device)
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.usage = '''%(prog)s --name FILENAME [--device SERIAL] [flags...]
+
+No flags: Prints existing command-line file.
+Empty string: Deletes command-line file.
+Otherwise: Writes command-line file.
+
+'''
+  parser.add_argument('--name', required=True,
+                      help='Name of the file where flags are stored on the device.')
+  parser.add_argument('-e', '--executable', dest='executable', default='chrome',
+                      help='(deprecated) No longer used.')
+  script_common.AddEnvironmentArguments(parser)
+  script_common.AddDeviceArguments(parser)
+  logging_common.AddLoggingArguments(parser)
+
+  args, remote_args = parser.parse_known_args()
+  devil_chromium.Initialize(adb_path=args.adb_path)
+  logging_common.InitializeLogging(args)
+
+  devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices,
+                                                    default_retries=0)
+  all_devices = device_utils.DeviceUtils.parallel(devices)
+
+  if not remote_args:
+    # No args == do not update, just print flags.
+    remote_args = None
+    action = ''
+  elif len(remote_args) == 1 and not remote_args[0]:
+    # Single empty string arg == delete flags
+    remote_args = []
+    action = 'Deleted command line file. '
+  else:
+    action = 'Wrote command line file. '
+
+  def update_flags(device):
+    CheckBuildTypeSupportsFlags(device, args.name)
+    changer = flag_changer.FlagChanger(device, args.name)
+    if remote_args is not None:
+      flags = changer.ReplaceFlags(remote_args)
+    else:
+      flags = changer.GetCurrentFlags()
+    return (device, device.build_description, flags)
+
+  updated_values = all_devices.pMap(update_flags).pGet(None)
+
+  print('%sCurrent flags (in %s):' % (action, args.name))
+  for d, desc, flags in updated_values:
+    if flags:
+      # Shell-quote flags for easy copy/paste as new args on the terminal.
+      quoted_flags = ' '.join(cmd_helper.SingleQuote(f) for f in sorted(flags))
+    else:
+      quoted_flags = '( empty )'
+    print('  %s (%s): %s' % (d, desc, quoted_flags))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/adb_gdb b/src/build/android/adb_gdb
new file mode 100755
index 0000000..6de4273
--- /dev/null
+++ b/src/build/android/adb_gdb
@@ -0,0 +1,1000 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Force locale to C to allow recognizing output from subprocesses.
+LC_ALL=C
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+COMMAND_SUFFIX=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+      rm -f "$GDBSERVER_PIDFILE"
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" \
+          "$TARGET_DOMAIN_SOCKET" "$COMMAND_SUFFIX" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  trap "" EXIT
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+  echo "ERROR: $@" >&2
+  exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+  if [ "$VERBOSE" -gt 0 ]; then
+    echo "$@"
+  fi
+}
+
+DEFAULT_PULL_LIBS_DIR="/tmp/adb-gdb-support-$USER"
+IDE_DIR="$DEFAULT_PULL_LIBS_DIR"
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display proper program names in the help text and log messages.
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ADB=
+ANNOTATE=
+CGDB=
+GDBINIT=
+GDBSERVER=
+HELP=
+IDE=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PORT=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+ATTACH_DELAY=1
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+  case $opt in
+    --adb=*)
+      ADB=$optarg
+      ;;
+    --device=*)
+      export ANDROID_SERIAL=$optarg
+      ;;
+    --annotate=*)
+      ANNOTATE=$optarg
+      ;;
+    --gdbserver=*)
+      GDBSERVER=$optarg
+      ;;
+    --gdb=*)
+      GDB=$optarg
+      ;;
+    --help|-h|-?)
+      HELP=true
+      ;;
+    --ide)
+      IDE=true
+      ;;
+    --ndk-dir=*)
+      NDK_DIR=$optarg
+      ;;
+    --no-pull-libs)
+      NO_PULL_LIBS=true
+      ;;
+    --package-name=*)
+      PACKAGE_NAME=$optarg
+      ;;
+    --pid=*)
+      PID=$optarg
+      ;;
+    --port=*)
+      PORT=$optarg
+      ;;
+    --program-name=*)
+      PROGRAM_NAME=$optarg
+      ;;
+    --pull-libs)
+      PULL_LIBS=true
+      ;;
+    --pull-libs-dir=*)
+      PULL_LIBS_DIR=$optarg
+      ;;
+    --script=*)
+      GDBINIT=$optarg
+      ;;
+    --attach-delay=*)
+      ATTACH_DELAY=$optarg
+      ;;
+    --su-prefix=*)
+      SU_PREFIX=$optarg
+      ;;
+    --symbol-dir=*)
+      SYMBOL_DIR=$optarg
+      ;;
+    --output-directory=*)
+      CHROMIUM_OUTPUT_DIR=$optarg
+      ;;
+    --target-arch=*)
+      TARGET_ARCH=$optarg
+      ;;
+    --toolchain=*)
+      TOOLCHAIN=$optarg
+      ;;
+    --cgdb)
+      CGDB=cgdb
+      ;;
+    --cgdb=*)
+      CGDB=$optarg
+      ;;
+    --verbose)
+      VERBOSE=$(( $VERBOSE + 1 ))
+      ;;
+    -*)
+      panic "Unknown option $opt, see --help." >&2
+      ;;
+    *)
+      if [ "$PACKAGE_NAME" ]; then
+        panic "You can only provide a single package name as argument!\
+ See --help."
+      fi
+      PACKAGE_NAME=$opt
+      ;;
+  esac
+done
+
+if [ "$HELP" ]; then
+  if [ "$ADB_GDB_PROGNAME" ]; then
+    # Assume wrapper scripts all provide a default package name.
+    cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+  else
+    # Assume this is a direct call to adb_gdb
+  cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+  fi
+
+  cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+   - target gdbserver binary
+   - host gdb client (e.g. arm-linux-androideabi-gdb)
+   - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent version of the debug version of
+shared libraries under one of the following directories:
+
+  \$CHROMIUM_SRC/<out>/lib/                (used by GYP builds)
+  \$CHROMIUM_SRC/<out>/lib.unstripped/     (used by GN builds)
+
+Where <out> is determined by CHROMIUM_OUTPUT_DIR, or --output-directory.
+
+You can set the path manually via --symbol-dir.
+
+The script tries to extract the target architecture from your target device,
+but if this fails, will default to 'arm'. Use --target-arch=<name> to force
+its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-lib options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and a device is not specified with either --device or ANDROID_SERIAL.
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+is done to greatly improve the debugging experience, e.g. by allowing
+readable thread stacks. The libraries are copied to the following
+directory by default:
+
+  $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+  --help|-h|-?          Print this message.
+  --verbose             Increase verbosity.
+
+  --cgdb[=<file>]       Use cgdb (an interface for gdb that shows the code).
+  --symbol-dir=<path>   Specify directory with symbol shared libraries.
+  --output-directory=<path> Specify the output directory (e.g. "out/Debug").
+  --package-name=<name> Specify package name (alternative to 1st argument).
+  --program-name=<name> Specify program name (cosmetic only).
+  --pid=<pid>           Specify application process pid.
+  --attach-delay=<num>  Seconds to wait for gdbserver to attach to the
+                        remote process before starting gdb. Default 1.
+                        <num> may be a float if your sleep(1) supports it.
+  --annotate=<num>      Enable gdb annotation.
+  --script=<file>       Specify extra GDB init script.
+
+  --gdbserver=<file>    Specify target gdbserver binary.
+  --gdb=<file>          Specify host gdb client binary.
+  --target-arch=<name>  Specify NDK target arch.
+  --adb=<file>          Specify host ADB binary.
+  --device=<serial>     ADB device serial to use (-s flag).
+  --port=<port>         Specify the TCP port to use.
+  --ide                 Forward gdb port, but do not enter gdb console.
+
+  --su-prefix=<prefix>  Prepend <prefix> to 'adb shell' commands that are
+                        run by this script. This can be useful to use
+                        the 'su' program on rooted production devices.
+                        e.g. --su-prefix="su -c"
+
+  --pull-libs           Force system libraries extraction.
+  --no-pull-libs        Do not extract any system library.
+  --pull-libs-dir=<path> Specify system libraries extraction directory.
+
+EOF
+  exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+  panic "Please specify a package name on the command line. See --help."
+fi
+
+if [[ -z "$SYMBOL_DIR" && -z "$CHROMIUM_OUTPUT_DIR" ]]; then
+  if [[ -e "build.ninja" ]]; then
+    CHROMIUM_OUTPUT_DIR=$PWD
+  else
+    panic "Please specify an output directory by using one of:
+       --output-directory=out/Debug
+       CHROMIUM_OUTPUT_DIR=out/Debug
+       Setting working directory to an output directory.
+       See --help."
+   fi
+fi
+
+if ls *.so >/dev/null 2>&1; then
+  panic ".so files found in your working directory. These will conflict with" \
+      "library lookup logic. Change your working directory and try again."
+fi
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_OUTPUT_DIR.
+#
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+  # GYP places unstripped libraries under out/lib
+  # GN places them under out/lib.unstripped
+  local PARENT_DIR="$CHROMIUM_OUTPUT_DIR"
+  if [[ ! -e "$PARENT_DIR" ]]; then
+    PARENT_DIR="$CHROMIUM_SRC/$PARENT_DIR"
+  fi
+  SYMBOL_DIR="$PARENT_DIR/lib.unstripped"
+  if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+    SYMBOL_DIR="$PARENT_DIR/lib"
+    if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+      panic "Could not find any symbols under \
+$PARENT_DIR/lib{.unstripped}. Please build the program first!"
+    fi
+  fi
+  log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
+
+if [ -z "$SYMBOL_DIR" ]; then
+  detect_symbol_dir
+elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+  panic "Could not find any symbols under $SYMBOL_DIR"
+fi
+
+if [ -z "$NDK_DIR" ]; then
+  ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \
+'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
+else
+  if [ ! -d "$NDK_DIR" ]; then
+    panic "Invalid directory: $NDK_DIR"
+  fi
+  if [ ! -f "$NDK_DIR/ndk-build" ]; then
+    panic "Not a valid NDK directory: $NDK_DIR"
+  fi
+  ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+  panic "Unknown --script file: $GDBINIT"
+fi
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+  ADB=$(which adb 2>/dev/null)
+  if [ -z "$ADB" ]; then
+    panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+  fi
+  log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+  panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If there are more than one device connected, and ANDROID_SERIAL is not
+# defined, print an error message.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
+  echo "ERROR: There is more than one Android device connected to ADB."
+  echo "Please define ANDROID_SERIAL to specify which one to use."
+  exit 1
+fi
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
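+# Example (illustrative): FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)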
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must
+  # be '%%<code>'.
+  RET=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result
+  # in an empty line.
+  LASTLINE=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code.
+  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+  # Remove temp file.
+  rm -f $TMPOUT
+  # Exit with the appropriate status.
+  return $RET
+}
+
+# Find the target architecture from a local shared library.
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+  # ls prints a broken pipe error when there are a lot of libs.
+  local RANDOM_LIB=$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null| head -n1)
+  local SO_DESC=$(file $RANDOM_LIB)
+  case $SO_DESC in
+    *32-bit*ARM,*) echo "arm";;
+    *64-bit*ARM,*) echo "arm64";;
+    *32-bit*Intel,*) echo "x86";;
+    *x86-64,*) echo "x86_64";;
+    *32-bit*MIPS,*) echo "mips";;
+    *) echo "";
+  esac
+}
+
+if [ -z "$TARGET_ARCH" ]; then
+  TARGET_ARCH=$(get_gyp_target_arch)
+  if [ -z "$TARGET_ARCH" ]; then
+    TARGET_ARCH=arm
+  fi
+else
+  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+  # script prefers the NDK 'x86' name because it uses that name to find
+  # NDK-specific files (e.g. the host gdb).
+  if [ "$TARGET_ARCH" = "ia32" ]; then
+    TARGET_ARCH=x86
+    log "Auto-config: --arch=$TARGET_ARCH  (equivalent to ia32)"
+  fi
+fi
+
+# Detect the NDK system name, i.e. the name used to identify the host.
+# out: NDK system name (e.g. 'linux' or 'darwin')
+get_ndk_host_system () {
+  local HOST_OS
+  if [ -z "$NDK_HOST_SYSTEM" ]; then
+    HOST_OS=$(uname -s)
+    case $HOST_OS in
+      Linux) NDK_HOST_SYSTEM=linux;;
+      Darwin) NDK_HOST_SYSTEM=darwin;;
+      *) panic "You can't run this script on this system: $HOST_OS";;
+    esac
+  fi
+  echo "$NDK_HOST_SYSTEM"
+}
+
+# Detect the NDK host architecture name.
+# out: NDK arch name (e.g. 'x86' or 'x86_64')
+get_ndk_host_arch () {
+  local HOST_ARCH HOST_OS
+  if [ -z "$NDK_HOST_ARCH" ]; then
+    HOST_OS=$(get_ndk_host_system)
+    HOST_ARCH=$(uname -p)
+    if [ "$HOST_ARCH" = "unknown" ]; then
+      # In case where "-p" returns "unknown" just use "-m" (machine hardware
+      # name). According to this patch from Fedora "-p" is equivalent to "-m"
+      # anyway: https://goo.gl/Pd47x3
+      HOST_ARCH=$(uname -m)
+    fi
+    case $HOST_ARCH in
+      i?86) NDK_HOST_ARCH=x86;;
+      x86_64|amd64) NDK_HOST_ARCH=x86_64;;
+      *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
+    esac
+    # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
+    if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
+      # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
+      # implementations of the tool. See http://b.android.com/53769
+      HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
+      if [ "$HOST_64BITS" ]; then
+        NDK_HOST_ARCH=x86_64
+      fi
+    fi
+  fi
+  echo "$NDK_HOST_ARCH"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+  case $1 in
+    arm)
+      echo "arm-linux-androideabi"
+      ;;
+    arm64)
+      echo "aarch64-linux-android"
+      ;;
+    x86)
+      echo "i686-linux-android"
+      ;;
+    x86_64)
+      echo "x86_64-linux-android"
+      ;;
+    mips)
+      echo "mipsel-linux-android"
+      ;;
+    *)
+      echo "$ARCH-linux-android"
+      ;;
+  esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+  # Return the configure triplet, except for x86 and x86_64!
+  if [ "$1" = "x86" -o "$1" = "x86_64" ]; then
+    echo "$1"
+  else
+    get_arch_gnu_config $1
+  fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+  local NDK_DIR="${1%/}"
+  local ARCH="$2"
+  local SUBPATH="$3"
+  local NAME="$(get_arch_toolchain_prefix $ARCH)"
+  local FILE TARGET
+  FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH
+  if [ ! -f "$FILE" ]; then
+    FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH
+    if [ ! -f "$FILE" ]; then
+      FILE=
+    fi
+  fi
+  echo "$FILE"
+}
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK target architecture name
+# Out: install path + binary prefix (e.g.
+#      ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+  local NDK_DIR="$1"
+  local ARCH="$2"
+  local TARGET NAME HOST_OS HOST_ARCH LD CONFIG
+
+  # NOTE: This will need to be updated if the NDK changes the names or moves
+  #        the location of its prebuilt toolchains.
+  #
+  LD=
+  HOST_OS=$(get_ndk_host_system)
+  HOST_ARCH=$(get_ndk_host_arch)
+  CONFIG=$(get_arch_gnu_config $ARCH)
+  LD=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-ld")
+  if [ -z "$LD" -a "$HOST_ARCH" = "x86_64" ]; then
+    LD=$(get_ndk_toolchain_prebuilt \
+         "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-ld")
+  fi
+  if [ ! -f "$LD" -a "$ARCH" = "x86" ]; then
+    # Special case, the x86 toolchain used to be incorrectly
+    # named i686-android-linux-gcc!
+    LD=$(get_ndk_toolchain_prebuilt \
+         "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-ld")
+  fi
+  if [ -z "$LD" ]; then
+    panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+  fi
+  echo "${LD%%ld}"
+}
+
+# $1: NDK install path
+get_ndk_host_gdb_client() {
+  local NDK_DIR="$1"
+  local HOST_OS HOST_ARCH
+
+  HOST_OS=$(get_ndk_host_system)
+  HOST_ARCH=$(get_ndk_host_arch)
+  echo "$NDK_DIR/prebuilt/$HOST_OS-$HOST_ARCH/bin/gdb"
+}
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+  local NDK_DIR="$1"
+  local ARCH=$2
+  local BINARY
+
+  # The location has moved after NDK r8
+  BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+  if [ ! -f "$BINARY" ]; then
+    BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+  fi
+  echo "$BINARY"
+}
+
+# Check/probe the path to the Android toolchain installation. Always
+# use the NDK versions of gdb and gdbserver. They must match to avoid
+# issues when both binaries do not speak the same wire protocol.
+#
+if [ -z "$TOOLCHAIN" ]; then
+  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
+                      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
+  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
+else
+  # Be flexible, allow one to specify either the install path or the bin
+  # sub-directory in --toolchain:
+  #
+  if [ -d "$TOOLCHAIN/bin" ]; then
+    TOOLCHAIN=$TOOLCHAIN/bin
+  fi
+  ANDROID_TOOLCHAIN=$TOOLCHAIN
+fi
+
+# Cosmetic: Remove trailing directory separator.
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
+# Find host GDB client binary
+if [ -z "$GDB" ]; then
+  GDB=$(get_ndk_host_gdb_client "$ANDROID_NDK_ROOT")
+  if [ -z "$GDB" ]; then
+    panic "Can't find Android gdb client in your path, check your \
+--toolchain or --gdb path."
+  fi
+  log "Host gdb client: $GDB"
+fi
+
+# Find the gdbserver binary; we will later push it to /data/local/tmp.
+# This ensures that both gdbserver and $GDB talk the same binary protocol,
+# otherwise weird problems will appear.
+#
+if [ -z "$GDBSERVER" ]; then
+  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  if [ -z "$GDBSERVER" ]; then
+    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
+valid one!"
+  fi
+  log "Auto-config: --gdbserver=$GDBSERVER"
+fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
+# Return the timestamp of a given file, as number of seconds since epoch.
+# $1: file path
+# Out: file timestamp
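+# NOTE: uses GNU 'stat -c %Y'; on a Darwin host the equivalent would be
+# 'stat -f %m' (not handled here).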
+get_file_timestamp () {
+  stat -c %Y "$1" 2>/dev/null
+}
+
+# Allow several concurrent debugging sessions
+APP_DATA_DIR=$(adb_shell run-as $PACKAGE_NAME /system/bin/sh -c pwd)
+fail_panic "Failed to run-as $PACKAGE_NAME, is the app debuggable?"
+TARGET_GDBSERVER="$APP_DATA_DIR/gdbserver-adb-gdb-$TMP_ID"
+TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID
+
+# Select correct app_process for architecture.
+case $TARGET_ARCH in
+      arm|x86|mips) GDBEXEC=app_process32;;
+      arm64|x86_64) GDBEXEC=app_process64; SUFFIX_64_BIT=64;;
+      *) panic "Unknown app_process for architecture!";;
+esac
+
+# Default to app_process if bit-width specific process isn't found.
+adb_shell ls /system/bin/$GDBEXEC > /dev/null
+if [ $? != 0 ]; then
+    GDBEXEC=app_process
+fi
+
+# Detect AddressSanitizer setup on the device. In that case app_process is a
+# script, and the real executable is app_process.real.
+GDBEXEC_ASAN=app_process.real
+adb_shell ls /system/bin/$GDBEXEC_ASAN > /dev/null
+if [ $? == 0 ]; then
+    GDBEXEC=$GDBEXEC_ASAN
+fi
+
+ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
+if [[ -n "$ANDROID_SERIAL" ]]; then
+  DEFAULT_PULL_LIBS_DIR="$DEFAULT_PULL_LIBS_DIR/$ANDROID_SERIAL-$SUFFIX_64_BIT"
+fi
+PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}
+
+HOST_FINGERPRINT=
+DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
+[[ "$DEVICE_FINGERPRINT" ]] || panic "Failed to get the device fingerprint"
+log "Device build fingerprint: $DEVICE_FINGERPRINT"
+
+if [ ! -f "$PULL_LIBS_DIR/build.fingerprint" ]; then
+  log "Auto-config: --pull-libs  (no cached libraries)"
+  PULL_LIBS=true
+else
+  HOST_FINGERPRINT=$(< "$PULL_LIBS_DIR/build.fingerprint")
+  log "Host build fingerprint:   $HOST_FINGERPRINT"
+  if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
+    log "Auto-config: --no-pull-libs (fingerprint match)"
+    NO_PULL_LIBS=true
+  else
+    log "Auto-config: --pull-libs  (fingerprint mismatch)"
+    PULL_LIBS=true
+  fi
+fi
+
+# If requested, work for M-x gdb.  The gdb indirections make it
+# difficult to pass --annotate=3 to the gdb binary itself.
+if [ "$ANNOTATE" ]; then
+  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
+fi
+
+# Get the PID from the first argument or else find the PID of the
+# browser process.
+if [ -z "$PID" ]; then
+  PROCESSNAME=$PACKAGE_NAME
+  if [ -z "$PID" ]; then
+    PID=$(adb_shell ps | \
+          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    panic "Can't find application process PID."
+  fi
+  log "Found process PID: $PID"
+fi
+
+# Determine if 'adb shell' runs as root or not.
+# If so, we can launch gdbserver directly, otherwise, we have to
+# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
+#
+if [ "$SU_PREFIX" ]; then
+  # Need to check that this works properly.
+  SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
+  adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1
+  if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
+    echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
+    echo "$ adb shell $SU_PREFIX \"echo foo\""
+    cat $SU_PREFIX_TEST_LOG
+    exit 1
+  fi
+  COMMAND_PREFIX="$SU_PREFIX \""
+  COMMAND_SUFFIX="\""
+else
+  SHELL_UID=$("$ADB" shell cat /proc/self/status | \
+              awk '$1 == "Uid:" { print $2; }')
+  log "Shell UID: $SHELL_UID"
+  if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
+    COMMAND_PREFIX="run-as $PACKAGE_NAME"
+    COMMAND_SUFFIX=
+  else
+    COMMAND_PREFIX=
+    COMMAND_SUFFIX=
+  fi
+fi
+log "Command prefix: '$COMMAND_PREFIX'"
+log "Command suffix: '$COMMAND_SUFFIX'"
+
+mkdir -p "$PULL_LIBS_DIR"
+fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"
+
+# Pull device's system libraries that are mapped by our process.
+# Pulling all system libraries takes too long, so determine which ones
+# we need by looking at /proc/$PID/maps instead.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+  MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX)
+  if [ $? != 0 ]; then
+    echo "ERROR: Could not list process's memory mappings."
+    if [ "$SU_PREFIX" ]; then
+      panic "Are you sure your --su-prefix is correct?"
+    else
+      panic "Use --su-prefix if the application is not debuggable."
+    fi
+  fi
+  # Remove the fingerprint file in case pulling one of the libs fails.
+  rm -f "$PULL_LIBS_DIR/build.fingerprint"
+  SYSTEM_LIBS=$(echo "$MAPPINGS" | \
+      awk '$6 ~ /\/(system|apex|vendor)\/.*\.so$/ { print $6; }' | sort -u)
+  for SYSLIB in /system/bin/linker$SUFFIX_64_BIT $SYSTEM_LIBS; do
+    echo "Pulling from device: $SYSLIB"
+    DST_FILE=$PULL_LIBS_DIR$SYSLIB
+    DST_DIR=$(dirname "$DST_FILE")
+    mkdir -p "$DST_DIR" && "$ADB" pull $SYSLIB "$DST_FILE" 2>/dev/null
+    fail_panic "Could not pull $SYSLIB from device !?"
+  done
+  echo "Writing the device fingerprint"
+  echo "$DEVICE_FINGERPRINT" > "$PULL_LIBS_DIR/build.fingerprint"
+fi
+
+# Pull the app_process binary from the device.
+log "Pulling $GDBEXEC from device"
+"$ADB" pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+             grep -v "^$" | tr '\n' ':')
+SOLIB_DIRS=${SOLIB_DIRS%:}  # Strip trailing :
+
+# Applications with minSdkVersion >= 24 will have their data directories
+# created with rwx------ permissions, preventing adbd from forwarding to
+# the gdbserver socket.
+adb_shell $COMMAND_PREFIX chmod a+x $APP_DATA_DIR $COMMAND_SUFFIX
+
+# Push gdbserver to the device
+log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
+"$ADB" push $GDBSERVER $TMP_TARGET_GDBSERVER >/dev/null && \
+    adb_shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER $COMMAND_SUFFIX && \
+    adb_shell rm $TMP_TARGET_GDBSERVER
+fail_panic "Could not copy gdbserver to the device!"
+
+if [ -z "$PORT" ]; then
+  # Random port to allow multiple concurrent sessions.
+  PORT=$(( $RANDOM % 1000 + 5039 ))
+fi
+HOST_PORT=$PORT
+TARGET_DOMAIN_SOCKET=$APP_DATA_DIR/gdb-socket-$HOST_PORT
+
+# Set up network redirection
+log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_DOMAIN_SOCKET)"
+"$ADB" forward tcp:$HOST_PORT localfilesystem:$TARGET_DOMAIN_SOCKET
+fail_panic "Could not setup network redirection from \
+host:localhost:$HOST_PORT to device:$TARGET_DOMAIN_SOCKET"
+
+# Start gdbserver in the background
+# Note that using run-as requires the package to be debuggable.
+#
+# If not, this will fail horribly. The alternative is to run the
+# program as root, which requires of course root privileges.
+# Maybe we should add a --root option to enable this?
+#
+
+for i in 1 2; do
+  log "Starting gdbserver in the background:"
+  GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
+  log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER \
+    --once +$TARGET_DOMAIN_SOCKET \
+    --attach $PID $COMMAND_SUFFIX"
+  "$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER \
+    --once +$TARGET_DOMAIN_SOCKET \
+    --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1 &
+  GDBSERVER_PID=$!
+  echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
+  log "background job pid: $GDBSERVER_PID"
+
+  # Sleep to allow gdbserver to attach to the remote process and be
+  # ready to connect to.
+  log "Sleeping ${ATTACH_DELAY}s to ensure gdbserver is alive"
+  sleep "$ATTACH_DELAY"
+  log "Job control: $(jobs -l)"
+  STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
+  if [ "$STATE" != "Running" ]; then
+    pid_msg=$(grep "is already traced by process" $GDBSERVER_LOG 2>/dev/null)
+    if [[ -n "$pid_msg" ]]; then
+      old_pid=${pid_msg##* }
+      old_pid=${old_pid//[$'\r\n']}  # Trim trailing \r.
+      echo "Killing previous gdb server process (pid=$old_pid)"
+      adb_shell $COMMAND_PREFIX kill -9 $old_pid $COMMAND_SUFFIX
+      continue
+    fi
+    echo "ERROR: GDBServer either failed to run or attach to PID $PID!"
+    echo "Here is the output from gdbserver (also try --verbose for more):"
+    echo "===== gdbserver.log start ====="
+    cat $GDBSERVER_LOG
+    echo ="===== gdbserver.log end ======"
+    exit 1
+  fi
+  break
+done
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+cat > "$COMMANDS" <<EOF
+set osabi GNU/Linux  # Copied from ndk-gdb.py.
+set print pretty 1
+python
+import sys
+sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')
+try:
+  import gdb_chrome
+finally:
+  sys.path.pop(0)
+end
+file $TMPDIR/$GDBEXEC
+directory $CHROMIUM_OUTPUT_DIR
+set solib-absolute-prefix $PULL_LIBS_DIR
+set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR
+
+python
+# Copied from ndk-gdb.py:
+def target_remote_with_retry(target, timeout_seconds):
+  import time
+  end_time = time.time() + timeout_seconds
+  while True:
+    try:
+      gdb.execute('target remote ' + target)
+      return True
+    except gdb.error as e:
+      time_left = end_time - time.time()
+      if time_left < 0 or time_left > timeout_seconds:
+        print("Error: unable to connect to device.")
+        print(e)
+        return False
+      time.sleep(min(0.25, time_left))
+
+print("Connecting to :$HOST_PORT...")
+if target_remote_with_retry(':$HOST_PORT', 5):
+  print("Attached! Reading symbols (takes ~30 seconds).")
+end
+EOF
+
+if [ "$GDBINIT" ]; then
+  cat "$GDBINIT" >> "$COMMANDS"
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+  echo "### START $COMMANDS"
+  cat "$COMMANDS"
+  echo "### END $COMMANDS"
+fi
+
+if [ "$IDE" ]; then
+  mkdir -p "$IDE_DIR"
+  SYM_GDB="$IDE_DIR/gdb"
+  SYM_EXE="$IDE_DIR/app_process"
+  SYM_INIT="$IDE_DIR/gdbinit"
+  ln -sf "$TMPDIR/$GDBEXEC" "$SYM_EXE"
+  ln -sf "$COMMANDS" "$SYM_INIT"
+  # gdb doesn't work when symlinked, so create a wrapper.
+  echo
+  cat > $SYM_GDB <<EOF
+#!/bin/sh
+exec $GDB "\$@"
+EOF
+  chmod u+x $SYM_GDB
+
+  echo "GDB server listening on: localhost:$PORT"
+  echo "GDB wrapper script: $SYM_GDB"
+  echo "App executable: $SYM_EXE"
+  echo "gdbinit: $SYM_INIT"
+  echo "Connect with vscode: https://chromium.googlesource.com/chromium/src/+/master/docs/vscode.md#Launch-Commands"
+  echo "Showing gdbserver logs. Press Ctrl-C to disconnect."
+  tail -f "$GDBSERVER_LOG"
+else
+  log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
+  echo "Server log: $GDBSERVER_LOG"
+  if [ "$CGDB" ]; then
+    $CGDB -d $GDB -- $GDB_ARGS -x "$COMMANDS"
+  else
+    $GDB $GDB_ARGS -x "$COMMANDS"
+  fi
+fi
diff --git a/src/build/android/adb_install_apk.py b/src/build/android/adb_install_apk.py
new file mode 100755
index 0000000..6ec98e2
--- /dev/null
+++ b/src/build/android/adb_install_apk.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
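+# Example invocations (illustrative; bare names are resolved under
+# <output dir>/apks/):
+#   adb_install_apk.py out/Release/apks/ChromePublic.apk
+#   adb_install_apk.py --release ChromePublic.apk -d <device-serial>
+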
+import argparse
+import glob
+import logging
+import os
+import sys
+
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  apk_group = parser.add_mutually_exclusive_group(required=True)
+  apk_group.add_argument('--apk', dest='apk_name',
+                         help='DEPRECATED The name of the apk containing the'
+                              ' application (with the .apk extension).')
+  apk_group.add_argument('apk_path', nargs='?',
+                         help='The path to the APK to install.')
+
+  # TODO(jbudorick): Remove once no clients pass --apk_package
+  parser.add_argument('--apk_package', help='DEPRECATED unused')
+  parser.add_argument('--split',
+                      action='append',
+                      dest='splits',
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument('--keep_data',
+                      action='store_true',
+                      default=False,
+                      help='Keep the package data when installing '
+                           'the application.')
+  parser.add_argument('--debug', action='store_const', const='Debug',
+                      dest='build_type',
+                      default=os.environ.get('BUILDTYPE', 'Debug'),
+                      help='If set, run test suites under out/Debug. '
+                           'Default is env var BUILDTYPE or Debug')
+  parser.add_argument('--release', action='store_const', const='Release',
+                      dest='build_type',
+                      help='If set, run test suites under out/Release. '
+                           'Default is env var BUILDTYPE or Debug.')
+  parser.add_argument('-d', '--device', dest='devices', action='append',
+                      default=[],
+                      help='Target device for apk to install on. Enter multiple'
+                           ' times for multiple devices.')
+  parser.add_argument('--adb-path', type=os.path.abspath,
+                      help='Absolute path to the adb binary to use.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Enable verbose logging.')
+  parser.add_argument('--downgrade', action='store_true',
+                      help='If set, allows downgrading of apk.')
+  parser.add_argument('--timeout', type=int,
+                      default=device_utils.DeviceUtils.INSTALL_DEFAULT_TIMEOUT,
+                      help='Seconds to wait for APK installation. '
+                           '(default: %(default)s)')
+
+  args = parser.parse_args()
+
+  run_tests_helper.SetLogLevel(args.verbose)
+  constants.SetBuildType(args.build_type)
+
+  devil_chromium.Initialize(
+      output_directory=constants.GetOutDirectory(),
+      adb_path=args.adb_path)
+
+  apk = args.apk_path or args.apk_name
+  if not apk.endswith('.apk'):
+    apk += '.apk'
+  if not os.path.exists(apk):
+    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
+    if not os.path.exists(apk):
+      parser.error('%s not found.' % apk)
+
+  if args.splits:
+    splits = []
+    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
+    for split_glob in args.splits:
+      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
+      if not apks:
+        logging.warning('No apks matched for %s.', split_glob)
+      for f in apks:
+        helper = apk_helper.ApkHelper(f)
+        if (helper.GetPackageName() == base_apk_package
+            and helper.GetSplitName()):
+          splits.append(f)
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+  devices = device_utils.DeviceUtils.HealthyDevices(denylist=denylist,
+                                                    device_arg=args.devices)
+
+  def denylisting_install(device):
+    try:
+      if args.splits:
+        device.InstallSplitApk(apk, splits, reinstall=args.keep_data,
+                               allow_downgrade=args.downgrade)
+      else:
+        device.Install(apk, reinstall=args.keep_data,
+                       allow_downgrade=args.downgrade,
+                       timeout=args.timeout)
+    except (device_errors.CommandFailedError,
+            device_errors.DeviceUnreachableError):
+      logging.exception('Failed to install %s', apk)
+      if denylist:
+        denylist.Extend([str(device)], reason='install_failure')
+        logging.warning('Denylisting %s', str(device))
+    except device_errors.CommandTimeoutError:
+      logging.exception('Timed out while installing %s', apk)
+      if denylist:
+        denylist.Extend([str(device)], reason='install_timeout')
+        logging.warning('Denylisting %s', str(device))
+
+  device_utils.DeviceUtils.parallel(devices).pMap(denylisting_install)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/adb_logcat_monitor.py b/src/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000..a919722
--- /dev/null
+++ b/src/build/android/adb_logcat_monitor.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create.  The
+script will run until killed by an external signal.  To test, run the
+script in a shell and <Ctrl>-C it after a while.  It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+from __future__ import print_function
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+  """Exception used to signal a timeout."""
+  pass
+
+
+class SigtermError(Exception):
+  """Exception used to catch a sigterm."""
+  pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+  """Spawns a adb logcat process if one is not currently running."""
+  process, logcat_num = devices[device_id]
+  if process:
+    if process.poll() is None:
+      # Logcat process is still happily running
+      return
+    else:
+      logging.info('Logcat for device %s has died', device_id)
+      error_filter = re.compile('- waiting for device -')
+      for line in process.stderr:
+        if not error_filter.match(line):
+          logging.error(device_id + ':   ' + line)
+
+  logging.info('Starting logcat %d for device %s', logcat_num,
+               device_id)
+  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+  process = subprocess.Popen([adb_cmd, '-s', device_id,
+                              'logcat', '-v', 'threadtime'],
+                             stdout=logcat_file,
+                             stderr=subprocess.PIPE)
+  devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+  """Gets the device list from adb.
+
+  We use an alarm in this function to avoid deadlocking from an external
+  dependency.
+
+  Args:
+    adb_cmd: binary to run adb
+
+  Returns:
+    list of devices or an empty list on timeout
+  """
+  signal.alarm(2)
+  try:
+    out, err = subprocess.Popen([adb_cmd, 'devices'],
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE).communicate()
+    if err:
+      logging.warning('adb device error %s', err.strip())
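+    # Typical 'adb devices' output (illustrative; serial and state are
+    # tab-separated, matching the regex below):
+    #   List of devices attached
+    #   0123456789ABCDEF	device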
+    return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE)
+  except TimeoutException:
+    logging.warning('"adb devices" command timed out')
+    return []
+  except (IOError, OSError):
+    logging.exception('Exception from "adb devices"')
+    return []
+  finally:
+    signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill."""
+  # We create the directory to ensure 'run once' semantics
+  if os.path.exists(base_dir):
+    print('adb_logcat_monitor: %s already exists? Cleaning' % base_dir)
+    shutil.rmtree(base_dir, ignore_errors=True)
+
+  os.makedirs(base_dir)
+  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+                      level=logging.INFO,
+                      format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+  # Set up the alarm for calling 'adb devices'. This is to ensure
+  # our script doesn't get stuck waiting for a process response
+  def TimeoutHandler(_signum, _unused_frame):
+    raise TimeoutException()
+  signal.signal(signal.SIGALRM, TimeoutHandler)
+
+  # Handle SIGTERMs to ensure clean shutdown
+  def SigtermHandler(_signum, _unused_frame):
+    raise SigtermError()
+  signal.signal(signal.SIGTERM, SigtermHandler)
+
+  logging.info('Started with pid %d', os.getpid())
+  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+  try:
+    with open(pid_file_path, 'w') as f:
+      f.write(str(os.getpid()))
+    while True:
+      for device_id in GetAttachedDevices(adb_cmd):
+        if not device_id in devices:
+          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
+          devices[device_id] = (None, 0)
+
+      for device in devices:
+        # This will spawn logcat watchers for any device ever detected
+        StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+      time.sleep(5)
+  except SigtermError:
+    logging.info('Received SIGTERM, shutting down')
+  except: # pylint: disable=bare-except
+    logging.exception('Unexpected exception in main.')
+  finally:
+    for process, _ in devices.itervalues():
+      if process:
+        try:
+          process.terminate()
+        except OSError:
+          pass
+    os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+  if 2 <= len(sys.argv) <= 3:
+    print('adb_logcat_monitor: Initializing')
+    sys.exit(main(*sys.argv[1:3]))
+
+  print('Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0])
diff --git a/src/build/android/adb_logcat_printer.py b/src/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000..a715170
--- /dev/null
+++ b/src/build/android/adb_logcat_printer.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to stdout, and will combine multiple
+logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGINT and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+  """Splices together multiple logcats from the same device.
+
+  Args:
+    list_of_lists: list of pairs (filename, list of timestamped lines)
+    logger: handler to log events
+
+  Returns:
+    list of lines with duplicates removed
+  """
+  cur_device_log = ['']
+  for cur_file, cur_file_lines in list_of_lists:
+    # Ignore files with just the logcat header
+    if len(cur_file_lines) < 2:
+      continue
+    common_index = 0
+    # Skip this step if list just has empty string
+    if len(cur_device_log) > 1:
+      try:
+        line = cur_device_log[-1]
+        # Used to make sure we only splice on a timestamped line
+        if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line):
+          common_index = cur_file_lines.index(line)
+        else:
+          logger.warning('splice error - no timestamp in "%s"?', line.strip())
+      except ValueError:
+        # The last line was valid but wasn't found in the next file
+        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+    cur_device_log += ['*'*30 + '  %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$')
+  # list of tuples (<device_id>, <seq num>, <full file path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
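+  # Tuples sort lexicographically: by device_id, then numerically by sequence
+  # number, so each device's log files end up in order.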
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if device_id not in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes.  We don't use the start of the device
+    # ID because it can be the same among devices.  Example lines:
+    # AB324:  foo
+    # AB324:  blah
+    device_logs += [('\n' + device[-5:] + ':  ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
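+    # Poll for up to 2 seconds (10 x 0.2s) for the monitor to remove its PID
+    # file, which indicates a clean shutdown.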
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat process to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
+  parser.add_option('--output-path',
+                    help='Output file path (if unspecified, prints to stdout)')
+  options, args = parser.parse_args(argv)
+  if len(args) != 1:
+    parser.error('Wrong number of unparsed args')
+  base_dir = args[0]
+
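+  # Accumulate this script's own log messages in memory so they can be
+  # appended after the device logs have been printed.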
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
+
+  if options.output_path:
+    if not os.path.exists(os.path.dirname(options.output_path)):
+      logger.warning('Output dir %s doesn\'t exist. Creating it.',
+                      os.path.dirname(options.output_path))
+      os.makedirs(os.path.dirname(options.output_path))
+    output_file = open(options.output_path, 'w')
+    logger.info('Dumping logcat to local file %s. If running in a build, '
+                'this file will likely be uploaded to Google Storage '
+                'in a later step. It can be downloaded from there.',
+                options.output_path)
+  else:
+    output_file = sys.stdout
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete.  There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second.  On most bots, this code path won't occur at all, since the
+    # adb_logcat_monitor.py command will have been spawned more than 5 seconds
+    # before this script is called.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/adb_profile_chrome b/src/build/android/adb_profile_chrome
new file mode 100755
index 0000000..d3244ff
--- /dev/null
+++ b/src/build/android/adb_profile_chrome
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome "$@"
diff --git a/src/build/android/adb_profile_chrome_startup b/src/build/android/adb_profile_chrome_startup
new file mode 100755
index 0000000..d5836cd
--- /dev/null
+++ b/src/build/android/adb_profile_chrome_startup
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling for chrome startup.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome_startup "$@"
diff --git a/src/build/android/adb_reverse_forwarder.py b/src/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000..90d3139
--- /dev/null
+++ b/src/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
+import argparse
+import sys
+import time
+
+import devil_chromium
+
+from devil.android import device_denylist
+from devil.android import device_utils
+from devil.android import forwarder
+from devil.utils import run_tests_helper
+
+from pylib import constants
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(
+      usage='Usage: %(prog)s [options] device_port '
+            'host_port [device_port_2 host_port_2] ...',
+      description=__doc__)
+  parser.add_argument(
+      '-v', '--verbose',
+      dest='verbose_count',
+      default=0,
+      action='count',
+      help='Verbose level (multiple times for more)')
+  parser.add_argument(
+      '--device',
+      help='Serial number of device we should use.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument(
+      '--debug',
+      action='store_const',
+      const='Debug',
+      dest='build_type',
+      default='Release',
+      help='DEPRECATED: use --output-directory instead.')
+  parser.add_argument(
+      '--output-directory',
+      help='Path to the root build directory.')
+  parser.add_argument(
+      'ports',
+      nargs='+',
+      type=int,
+      help='Port pair to reverse forward.')
+
+  args = parser.parse_args(argv)
+  run_tests_helper.SetLogLevel(args.verbose_count)
+
+  if len(args.ports) < 2 or len(args.ports) % 2:
+    parser.error('Need an even number of ports (device/host pairs)')
+
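+  # Group the positional port arguments into (device_port, host_port) pairs.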
+  port_pairs = zip(args.ports[::2], args.ports[1::2])
+
+  if args.build_type:
+    constants.SetBuildType(args.build_type)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+  device = device_utils.DeviceUtils.HealthyDevices(denylist=denylist,
+                                                   device_arg=args.device)[0]
+  try:
+    forwarder.Forwarder.Map(port_pairs, device)
+    while True:
+      time.sleep(60)
+  except KeyboardInterrupt:
+    sys.exit(0)
+  finally:
+    forwarder.Forwarder.UnmapAllDevicePorts(device)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/adb_system_webview_command_line b/src/build/android/adb_system_webview_command_line
new file mode 100755
index 0000000..a0d2705
--- /dev/null
+++ b/src/build/android/adb_system_webview_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current webview flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the webview
+# flags. For example:
+#   adb_system_webview_command_line --enable-webgl
+#
+# To remove all webview flags, pass an empty string for the flags:
+#   adb_system_webview_command_line ""
+
+exec "$(dirname "$0")"/adb_command_line.py --name webview-command-line "$@"
diff --git a/src/build/android/android_only_explicit_jni_exports.lst b/src/build/android/android_only_explicit_jni_exports.lst
new file mode 100644
index 0000000..f989691
--- /dev/null
+++ b/src/build/android/android_only_explicit_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only JNI_OnLoad.
+# Should be used for libraries that do explicit JNI registration.
+
+{
+  global:
+    JNI_OnLoad;
+  local:
+    *;
+};
diff --git a/src/build/android/android_only_jni_exports.lst b/src/build/android/android_only_jni_exports.lst
new file mode 100644
index 0000000..1336fee
--- /dev/null
+++ b/src/build/android/android_only_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only symbols required for JNI to work.
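+# Java_* covers implicitly-registered JNI methods, which follow the
+# Java_<package>_<class>_<method> naming convention.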
+
+{
+  global:
+    JNI_OnLoad;
+    Java_*;
+  local:
+    *;
+};
diff --git a/src/build/android/apk_operations.py b/src/build/android/apk_operations.py
new file mode 100755
index 0000000..d6cd583
--- /dev/null
+++ b/src/build/android/apk_operations.py
@@ -0,0 +1,1970 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Using colorama.Fore/Back/Style members
+# pylint: disable=no-member
+
+from __future__ import print_function
+
+import argparse
+import collections
+import json
+import logging
+import os
+import pipes
+import posixpath
+import random
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import zipfile
+
+import adb_command_line
+import devil_chromium
+from devil import devil_env
+from devil.android import apk_helper
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.sdk import adb_wrapper
+from devil.android.sdk import build_tools
+from devil.android.sdk import intent
+from devil.android.sdk import version_codes
+from devil.utils import run_tests_helper
+
+_DIR_SOURCE_ROOT = os.path.normpath(
+    os.path.join(os.path.dirname(__file__), '..', '..'))
+_JAVA_HOME = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
+
+with devil_env.SysPath(
+    os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')):
+  import colorama
+
+from incremental_install import installer
+from pylib import constants
+from pylib.symbols import deobfuscator
+from pylib.utils import simpleperf
+from pylib.utils import app_bundle_utils
+
+with devil_env.SysPath(
+    os.path.join(_DIR_SOURCE_ROOT, 'build', 'android', 'gyp')):
+  import bundletool
+
+BASE_MODULE = 'base'
+
+
+def _Colorize(text, style=''):
+  return (style
+      + text
+      + colorama.Style.RESET_ALL)
+
+
+def _InstallApk(devices, apk, install_dict):
+  def install(device):
+    if install_dict:
+      installer.Install(device, install_dict, apk=apk, permissions=[])
+    else:
+      device.Install(apk, permissions=[], allow_downgrade=True, reinstall=True)
+
+  logging.info('Installing %sincremental apk.', '' if install_dict else 'non-')
+  device_utils.DeviceUtils.parallel(devices).pMap(install)
+
+
+# A named tuple containing the information needed to convert a bundle into
+# an installable .apks archive.
+# Fields:
+#   bundle_path: Path to input bundle file.
+#   bundle_apks_path: Path to output bundle .apks archive file.
+#   aapt2_path: Path to aapt2 tool.
+#   keystore_path: Path to keystore file.
+#   keystore_password: Password for the keystore file.
+#   keystore_alias: Signing key name alias within the keystore file.
+#   system_image_locales: List of Chromium locales to include in system .apks.
+BundleGenerationInfo = collections.namedtuple(
+    'BundleGenerationInfo',
+    'bundle_path,bundle_apks_path,aapt2_path,keystore_path,keystore_password,'
+    'keystore_alias,system_image_locales')
+
+
+def _GenerateBundleApks(info,
+                        output_path=None,
+                        minimal=False,
+                        minimal_sdk_version=None,
+                        mode=None,
+                        optimize_for=None):
+  """Generate an .apks archive from a bundle on demand.
+
+  Args:
+    info: A BundleGenerationInfo instance.
+    output_path: Path of output .apks archive.
+    minimal: Create the minimal set of apks possible (english-only).
+    minimal_sdk_version: When minimal=True, use this sdkVersion.
+    mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES.
+    optimize_for: Override split config, either None, or one of
+      app_bundle_utils.OPTIMIZE_FOR_OPTIONS.
+  """
+  logging.info('Generating .apks file')
+  app_bundle_utils.GenerateBundleApks(
+      info.bundle_path,
+      # Store .apks file beside the .aab file by default so that it gets cached.
+      output_path or info.bundle_apks_path,
+      info.aapt2_path,
+      info.keystore_path,
+      info.keystore_password,
+      info.keystore_alias,
+      system_image_locales=info.system_image_locales,
+      mode=mode,
+      minimal=minimal,
+      minimal_sdk_version=minimal_sdk_version,
+      optimize_for=optimize_for)
+
+
+def _InstallBundle(devices, apk_helper_instance, package_name,
+                   command_line_flags_file, modules, fake_modules):
+  # Path Chrome creates after validating fake modules. This needs to be cleared
+  # for pushed fake modules to be picked up.
+  SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat'
+  # Chrome command line flag needed for fake modules to work.
+  FAKE_FEATURE_MODULE_INSTALL = '--fake-feature-module-install'
+
+  def ShouldWarnFakeFeatureModuleInstallFlag(device):
+    if command_line_flags_file:
+      changer = flag_changer.FlagChanger(device, command_line_flags_file)
+      return FAKE_FEATURE_MODULE_INSTALL not in changer.GetCurrentFlags()
+    return False
+
+  def ClearFakeModules(device):
+    if device.PathExists(SPLITCOMPAT_PATH, as_root=True):
+      device.RemovePath(
+          SPLITCOMPAT_PATH, force=True, recursive=True, as_root=True)
+      logging.info('Removed %s', SPLITCOMPAT_PATH)
+    else:
+      logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH)
+
+  def Install(device):
+    ClearFakeModules(device)
+    if fake_modules and ShouldWarnFakeFeatureModuleInstallFlag(device):
+      # Print warning if command line is not set up for fake modules.
+      msg = ('Command line has no %s: Fake modules will be ignored.' %
+             FAKE_FEATURE_MODULE_INSTALL)
+      print(_Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT))
+
+    device.Install(
+        apk_helper_instance,
+        permissions=[],
+        modules=modules,
+        fake_modules=fake_modules,
+        allow_downgrade=True)
+
+  # Basic checks for |modules| and |fake_modules|.
+  # * |fake_modules| cannot include 'base'.
+  # * If |fake_modules| is given, ensure |modules| includes 'base'.
+  # * They must be disjoint (checked by device.Install).
+  modules_set = set(modules) if modules else set()
+  fake_modules_set = set(fake_modules) if fake_modules else set()
+  if BASE_MODULE in fake_modules_set:
+    raise Exception('\'-f {}\' is disallowed.'.format(BASE_MODULE))
+  if fake_modules_set and BASE_MODULE not in modules_set:
+    raise Exception(
+        '\'-f FAKE\' must be accompanied by \'-m {}\''.format(BASE_MODULE))
+
+  logging.info('Installing bundle.')
+  device_utils.DeviceUtils.parallel(devices).pMap(Install)
+
+
+def _UninstallApk(devices, install_dict, package_name):
+  def uninstall(device):
+    if install_dict:
+      installer.Uninstall(device, package_name)
+    else:
+      device.Uninstall(package_name)
+  device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
+
+
+def _IsWebViewProvider(apk_helper_instance):
+  meta_data = apk_helper_instance.GetAllMetadata()
+  meta_data_keys = [pair[0] for pair in meta_data]
+  return 'com.android.webview.WebViewLibrary' in meta_data_keys
+
+
+def _SetWebViewProvider(devices, package_name):
+
+  def switch_provider(device):
+    if device.build_version_sdk < version_codes.NOUGAT:
+      logging.error('No need to switch provider on pre-Nougat devices (%s)',
+                    device.serial)
+    else:
+      device.SetWebViewImplementation(package_name)
+
+  device_utils.DeviceUtils.parallel(devices).pMap(switch_provider)
+
+
+def _NormalizeProcessName(debug_process_name, package_name):
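+  # E.g. with package name 'com.example': '' -> 'com.example',
+  # ':renderer' -> 'com.example:renderer', 'renderer' -> 'com.example:renderer'.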
+  if not debug_process_name:
+    debug_process_name = package_name
+  elif debug_process_name.startswith(':'):
+    debug_process_name = package_name + debug_process_name
+  elif '.' not in debug_process_name:
+    debug_process_name = package_name + ':' + debug_process_name
+  return debug_process_name
+
+
+def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None,
+               url=None, apk=None, wait_for_java_debugger=False,
+               debug_process_name=None, nokill=None):
+  if argv and command_line_flags_file is None:
+    raise Exception('This apk does not support any flags.')
+  if url:
+    # TODO(agrieve): Launch could be changed to require only package name by
+    #     parsing "dumpsys package" rather than relying on the apk.
+    if not apk:
+      raise Exception('Launching with URL is not supported when using '
+                      '--package-name. Use --apk-path instead.')
+    view_activity = apk.GetViewActivityName()
+    if not view_activity:
+      raise Exception('APK does not support launching with URLs.')
+
+  debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+
+  def launch(device):
+    # --persistent is required to have Settings.Global.DEBUG_APP be set, which
+    # we currently use to allow reading of flags. https://crbug.com/784947
+    if not nokill:
+      cmd = ['am', 'set-debug-app', '--persistent', debug_process_name]
+      if wait_for_java_debugger:
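+        # Insert '-w' just before the final argument (the process name).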
+        cmd[-1:-1] = ['-w']
+      # Ignore error since it will fail if apk is not debuggable.
+      device.RunShellCommand(cmd, check_return=False)
+
+      # The flags are first updated with input args.
+      if command_line_flags_file:
+        changer = flag_changer.FlagChanger(device, command_line_flags_file)
+        flags = []
+        if argv:
+          adb_command_line.CheckBuildTypeSupportsFlags(device,
+                                                       command_line_flags_file)
+          flags = shlex.split(argv)
+        try:
+          changer.ReplaceFlags(flags)
+        except device_errors.AdbShellCommandFailedError:
+          logging.exception('Failed to set flags')
+
+    if url is None:
+      # Simulate app icon click if no url is present.
+      cmd = [
+          'am', 'start', '-p', package_name, '-c',
+          'android.intent.category.LAUNCHER', '-a', 'android.intent.action.MAIN'
+      ]
+      device.RunShellCommand(cmd, check_return=True)
+    else:
+      launch_intent = intent.Intent(action='android.intent.action.VIEW',
+                                    activity=view_activity, data=url,
+                                    package=package_name)
+      device.StartActivity(launch_intent)
+  device_utils.DeviceUtils.parallel(devices).pMap(launch)
+  if wait_for_java_debugger:
+    print('Waiting for debugger to attach to process: ' +
+          _Colorize(debug_process_name, colorama.Fore.YELLOW))
+
+
+def _ChangeFlags(devices, argv, command_line_flags_file):
+  if argv is None:
+    _DisplayArgs(devices, command_line_flags_file)
+  else:
+    flags = shlex.split(argv)
+    def update(device):
+      adb_command_line.CheckBuildTypeSupportsFlags(device,
+                                                   command_line_flags_file)
+      changer = flag_changer.FlagChanger(device, command_line_flags_file)
+      changer.ReplaceFlags(flags)
+    device_utils.DeviceUtils.parallel(devices).pMap(update)
+
+
+def _TargetCpuToTargetArch(target_cpu):
+  if target_cpu == 'x64':
+    return 'x86_64'
+  if target_cpu == 'mipsel':
+    return 'mips'
+  return target_cpu
+
+
+def _RunGdb(device, package_name, debug_process_name, pid, output_directory,
+            target_cpu, port, ide, verbose):
+  if not pid:
+    debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+    pid = device.GetApplicationPids(debug_process_name, at_most_one=True)
+  if not pid:
+    # Attaching gdb makes the app run so slowly that it takes *minutes* to
+    # start up (as of 2018). Better to just fail than to start & attach.
+    raise Exception('App not running.')
+
+  gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
+  cmd = [
+      gdb_script_path,
+      '--package-name=%s' % package_name,
+      '--output-directory=%s' % output_directory,
+      '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
+      '--device=%s' % device.serial,
+      '--pid=%s' % pid,
+      '--port=%d' % port,
+  ]
+  if ide:
+    cmd.append('--ide')
+  # Enable verbose output of adb_gdb if it's set for this script.
+  if verbose:
+    cmd.append('--verbose')
+  if target_cpu:
+    cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
+  logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
+  print(_Colorize('All subsequent output is from adb_gdb script.',
+                  colorama.Fore.YELLOW))
+  os.execv(gdb_script_path, cmd)
+
+
+def _PrintPerDeviceOutput(devices, results, single_line=False):
+  for d, result in zip(devices, results):
+    if not single_line and d is not devices[0]:
+      sys.stdout.write('\n')
+    sys.stdout.write(
+          _Colorize('{} ({}):'.format(d, d.build_description),
+                    colorama.Fore.YELLOW))
+    sys.stdout.write(' ' if single_line else '\n')
+    yield result
+
+
+def _RunMemUsage(devices, package_name, query_app=False):
+  cmd_args = ['dumpsys', 'meminfo']
+  if not query_app:
+    cmd_args.append('--local')
+
+  def mem_usage_helper(d):
+    ret = []
+    for process in sorted(_GetPackageProcesses(d, package_name)):
+      meminfo = d.RunShellCommand(cmd_args + [str(process.pid)])
+      ret.append((process.name, '\n'.join(meminfo)))
+    return ret
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
+  for result in _PrintPerDeviceOutput(devices, all_results):
+    if not result:
+      print('No processes found.')
+    else:
+      for name, usage in sorted(result):
+        print(_Colorize('==== Output of "dumpsys meminfo %s" ====' % name,
+                        colorama.Fore.GREEN))
+        print(usage)
+
+
+def _DuHelper(device, path_spec, run_as=None):
+  """Runs "du -s -k |path_spec|" on |device| and returns parsed result.
+
+  Args:
+    device: A DeviceUtils instance.
+    path_spec: The list of paths to run du on. May contain shell expansions
+        (will not be escaped).
+    run_as: Package name to run as, or None to run as shell user. If not None
+        and app is not android:debuggable (run-as fails), then command will be
+        run as root.
+
+  Returns:
+    A dict of path->size in KiB containing all paths in |path_spec| that exist
+    on device. Paths that do not exist are silently ignored.
+  """
+  # Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
+  # 144     /data/data/org.chromium.chrome/cache
+  # 8       /data/data/org.chromium.chrome/files
+  # <snip>
+  # du: .*: No such file or directory
+
+  # The -d flag works differently across Android versions, so use -s instead.
+  # Without the explicit 2>&1, stderr and stdout get combined at random :(.
+  cmd_str = 'du -s -k ' + path_spec + ' 2>&1'
+  lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True,
+                                 check_return=False)
+  output = '\n'.join(lines)
+  # run-as: Package 'com.android.chrome' is not debuggable
+  if output.startswith('run-as:'):
+    # check_return=False needed for when some paths in path_spec do not exist.
+    lines = device.RunShellCommand(cmd_str, as_root=True, shell=True,
+                                   check_return=False)
+  ret = {}
+  try:
+    for line in lines:
+      # du: .*: No such file or directory
+      if line.startswith('du:'):
+        continue
+      size, subpath = line.split(None, 1)
+      ret[subpath] = int(size)
+    return ret
+  except ValueError:
+    logging.error('du command was: %s', cmd_str)
+    logging.error('Failed to parse du output:\n%s', output)
+    raise
+
+
+def _RunDiskUsage(devices, package_name):
+  # Measuring dex size is a bit complicated:
+  # https://source.android.com/devices/tech/dalvik/jit-compiler
+  #
+  # For KitKat and below:
+  #   dumpsys package contains:
+  #     dataDir=/data/data/org.chromium.chrome
+  #     codePath=/data/app/org.chromium.chrome-1.apk
+  #     resourcePath=/data/app/org.chromium.chrome-1.apk
+  #     nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
+  #   To measure odex:
+  #     ls -l /data/dalvik-cache/data@app@org.chromium.chrome-1.apk@classes.dex
+  #
+  # For Android L and M (and maybe for N+ system apps):
+  #   dumpsys package contains:
+  #     codePath=/data/app/org.chromium.chrome-1
+  #     resourcePath=/data/app/org.chromium.chrome-1
+  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
+  #   To measure odex:
+  #     # Option 1:
+  #  /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.dex
+  #  /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.vdex
+  #     ls -l /data/dalvik-cache/profiles/org.chromium.chrome
+  #         (these profiles all appear to be 0 bytes)
+  #     # Option 2:
+  #     ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
+  #
+  # For Android N+:
+  #   dumpsys package contains:
+  #     dataDir=/data/user/0/org.chromium.chrome
+  #     codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+  #     resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
+  #     Instruction Set: arm
+  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+  #       status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
+  #       ilter=quicken]
+  #     Instruction Set: arm64
+  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+  #       status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
+  #       uicken]
+  #   To measure odex:
+  #     ls -l /data/app/.../oat/arm/base.odex
+  #     ls -l /data/app/.../oat/arm/base.vdex (optional)
+  #   To measure the correct odex size:
+  #     cmd package compile -m speed org.chromium.chrome  # For webview
+  #     cmd package compile -m speed-profile org.chromium.chrome  # For others
+  def disk_usage_helper(d):
+    package_output = '\n'.join(d.RunShellCommand(
+        ['dumpsys', 'package', package_name], check_return=True))
+    # dumpsys does not return an error when the apk is not installed.
+    if not package_output or 'Unable to find package:' in package_output:
+      return None
+
+    # Ignore system apks that have updates installed.
+    package_output = re.sub(r'Hidden system packages:.*?^\b', '',
+                            package_output, flags=re.S | re.M)
+
+    try:
+      data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
+      code_path = re.search(r'codePath=(.*)', package_output).group(1)
+      lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
+                           package_output).group(1)
+    except AttributeError:
+      raise Exception('Error parsing dumpsys output: ' + package_output)
+
+    if code_path.startswith('/system'):
+      logging.warning('Measurement of system image apks can be inaccurate')
+
+    compilation_filters = set()
+    # Match "compilation_filter=value", where a line break can occur at any spot
+    # (refer to examples above).
+    awful_wrapping = r'\s*'.join('compilation_filter=')
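+    # The join produces r'c\s*o\s*m\s*p\s*...', so the match still succeeds
+    # when dumpsys wraps the token across lines.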
+    for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
+      compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
+    # Starting Android Q, output looks like:
+    #  arm: [status=speed-profile] [reason=install]
+    for m in re.finditer(r'\[status=(.+?)\]', package_output):
+      compilation_filters.add(m.group(1))
+    compilation_filter = ','.join(sorted(compilation_filters))
+
+    data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
+    # Measure code_cache separately since it can be large.
+    code_cache_sizes = {}
+    code_cache_dir = next(
+        (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
+    if code_cache_dir:
+      data_dir_sizes.pop(code_cache_dir)
+      code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
+                                   run_as=package_name)
+
+    apk_path_spec = code_path
+    if not apk_path_spec.endswith('.apk'):
+      apk_path_spec += '/*.apk'
+    apk_sizes = _DuHelper(d, apk_path_spec)
+    if lib_path.endswith('/lib'):
+      # Shows architecture subdirectory.
+      lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
+    else:
+      lib_sizes = _DuHelper(d, lib_path)
+
+    # Look at all possible locations for odex files.
+    odex_paths = []
+    for apk_path in apk_sizes:
+      mangled_apk_path = apk_path[1:].replace('/', '@')
+      apk_basename = posixpath.basename(apk_path)[:-4]
+      for ext in ('dex', 'odex', 'vdex', 'art'):
+        # Easier to check all architectures than to determine active ones.
+        for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
+          odex_paths.append(
+              '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
+          # No app could possibly have more than 5 dex files.
+          for suffix in ('', '2', '3', '4', '5'):
+            odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
+                arch, mangled_apk_path, suffix, ext))
+            # This path does not have |arch|, so don't repeat it for every arch.
+            if arch == 'arm':
+              odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
+                  mangled_apk_path, suffix))
+
+    odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))
+
+    return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+            compilation_filter)
+
+  def print_sizes(desc, sizes):
+    print('%s: %d KiB' % (desc, sum(sizes.itervalues())))
+    for path, size in sorted(sizes.iteritems()):
+      print('    %s: %s KiB' % (path, size))
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
+  for result in _PrintPerDeviceOutput(devices, all_results):
+    if not result:
+      print('APK is not installed.')
+      continue
+
+    (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+     compilation_filter) = result
+    total = sum(sum(sizes.itervalues()) for sizes in result[:-1])
+
+    print_sizes('Apk', apk_sizes)
+    print_sizes('App Data (non-code cache)', data_dir_sizes)
+    print_sizes('App Data (code cache)', code_cache_sizes)
+    print_sizes('Native Libs', lib_sizes)
+    show_warning = compilation_filter and 'speed' not in compilation_filter
+    compilation_filter = compilation_filter or 'n/a'
+    print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes)
+    if show_warning:
+      logging.warning('For a more realistic odex size, run:')
+      logging.warning('    %s compile-dex [speed|speed-profile]', sys.argv[0])
+    print('Total: %s KiB (%.1f MiB)' % (total, total / 1024.0))
+
+
+class _LogcatProcessor(object):
+  ParsedLine = collections.namedtuple(
+      'ParsedLine',
+      ['date', 'invokation_time', 'pid', 'tid', 'priority', 'tag', 'message'])
+
+  class NativeStackSymbolizer(object):
+    """Buffers lines from native stacks and symbolizes them when done."""
+    # E.g.: #06 pc 0x0000d519 /apex/com.android.runtime/lib/libart.so
+    # E.g.: #01 pc 00180c8d  /data/data/.../lib/libbase.cr.so
+    _STACK_PATTERN = re.compile(r'\s*#\d+\s+(?:pc )?(0x)?[0-9a-f]{8,16}\s')
+
+    def __init__(self, stack_script_context, print_func):
+      # To symbolize native stacks, we need to pass all lines at once.
+      self._stack_script_context = stack_script_context
+      self._print_func = print_func
+      self._crash_lines_buffer = None
+
+    def _FlushLines(self):
+      """Prints queued lines after sending them through stack.py."""
+      crash_lines = self._crash_lines_buffer
+      self._crash_lines_buffer = None
+      with tempfile.NamedTemporaryFile() as f:
+        f.writelines(x[0].message + '\n' for x in crash_lines)
+        f.flush()
+        proc = self._stack_script_context.Popen(
+            input_file=f.name, stdout=subprocess.PIPE)
+        lines = proc.communicate()[0].splitlines()
+
+      for i, line in enumerate(lines):
+        parsed_line, dim = crash_lines[min(i, len(crash_lines) - 1)]
+        d = parsed_line._asdict()
+        d['message'] = line
+        parsed_line = _LogcatProcessor.ParsedLine(**d)
+        self._print_func(parsed_line, dim)
+
+    def AddLine(self, parsed_line, dim):
+      # Assume all lines from DEBUG are stacks.
+      # Also look for "stack-looking" lines to catch manual stack prints.
+      # It's important to not buffer non-stack lines because stack.py does not
+      # pass them through.
+      is_crash_line = parsed_line.tag == 'DEBUG' or (self._STACK_PATTERN.match(
+          parsed_line.message))
+
+      if is_crash_line:
+        if self._crash_lines_buffer is None:
+          self._crash_lines_buffer = []
+        self._crash_lines_buffer.append((parsed_line, dim))
+        return
+
+      if self._crash_lines_buffer is not None:
+        self._FlushLines()
+
+      self._print_func(parsed_line, dim)
+
+
+  # Logcat tags for messages that are generally relevant but are not from PIDs
+  # associated with the apk.
+  _ALLOWLISTED_TAGS = {
+      'ActivityManager',  # Shows activity lifecycle messages.
+      'ActivityTaskManager',  # More activity lifecycle messages.
+      'AndroidRuntime',  # Java crash dumps
+      'DEBUG',  # Native crash dump.
+  }
+
+  # Matches messages only on pre-L (Dalvik) that are spammy and unimportant.
+  _DALVIK_IGNORE_PATTERN = re.compile('|'.join([
+      r'^Added shared lib',
+      r'^Could not find ',
+      r'^DexOpt:',
+      r'^GC_',
+      r'^Late-enabling CheckJNI',
+      r'^Link of class',
+      r'^No JNI_OnLoad found in',
+      r'^Trying to load lib',
+      r'^Unable to resolve superclass',
+      r'^VFY:',
+      r'^WAIT_',
+  ]))
+
+  def __init__(self,
+               device,
+               package_name,
+               stack_script_context,
+               deobfuscate=None,
+               verbose=False):
+    self._device = device
+    self._package_name = package_name
+    self._verbose = verbose
+    self._deobfuscator = deobfuscate
+    self._native_stack_symbolizer = _LogcatProcessor.NativeStackSymbolizer(
+        stack_script_context, self._PrintParsedLine)
+    # Process ID for the app's main process (with no :name suffix).
+    self._primary_pid = None
+    # Set of all Process IDs that belong to the app.
+    self._my_pids = set()
+    # Set of all Process IDs that we've parsed at some point.
+    self._seen_pids = set()
+    # Start proc 22953:com.google.chromeremotedesktop/
+    self._pid_pattern = re.compile(r'Start proc (\d+):{}/'.format(package_name))
+    # START u0 {act=android.intent.action.MAIN \
+    # cat=[android.intent.category.LAUNCHER] \
+    # flg=0x10000000 pkg=com.google.chromeremotedesktop} from uid 2000
+    self._start_pattern = re.compile(r'START .*pkg=' + package_name)
+
+    self.nonce = 'Chromium apk_operations.py nonce={}'.format(random.random())
+    # Holds lines buffered on start-up, before we find our nonce message.
+    self._initial_buffered_lines = []
+    self._UpdateMyPids()
+    # Give preference to PID reported by "ps" over those found from
+    # _start_pattern. There can be multiple "Start proc" messages from prior
+    # runs of the app.
+    self._found_initial_pid = self._primary_pid is not None
+    # Retrieve any additional patterns that are relevant for the user.
+    self._user_defined_highlight = None
+    user_regex = os.environ.get('CHROMIUM_LOGCAT_HIGHLIGHT')
+    if user_regex:
+      # re.compile() raises on an invalid pattern rather than returning a
+      # falsy value, so catch the error in order to actually reject bad input.
+      try:
+        self._user_defined_highlight = re.compile(user_regex)
+      except re.error:
+        print(_Colorize(
+            'Rejecting invalid regular expression: {}'.format(user_regex),
+            colorama.Fore.RED + colorama.Style.BRIGHT))
+
+  def _UpdateMyPids(self):
+    # We intentionally do not clear self._my_pids to make sure that the
+    # ProcessLine method below also includes lines from processes which may
+    # have already exited.
+    self._primary_pid = None
+    for process in _GetPackageProcesses(self._device, self._package_name):
+      # We take only the first "main" process found in order to account for
+      # possibly forked() processes.
+      if ':' not in process.name and self._primary_pid is None:
+        self._primary_pid = process.pid
+      self._my_pids.add(process.pid)
+
+  def _GetPidStyle(self, pid, dim=False):
+    if pid == self._primary_pid:
+      return colorama.Fore.WHITE
+    elif pid in self._my_pids:
+      # TODO(wnwen): Use one separate persistent color per process, pop LRU
+      return colorama.Fore.YELLOW
+    elif dim:
+      return colorama.Style.DIM
+    return ''
+
+  def _GetPriorityStyle(self, priority, dim=False):
+    # pylint:disable=no-self-use
+    if dim:
+      return ''
+    style = colorama.Fore.BLACK
+    if priority == 'E' or priority == 'F':
+      style += colorama.Back.RED
+    elif priority == 'W':
+      style += colorama.Back.YELLOW
+    elif priority == 'I':
+      style += colorama.Back.GREEN
+    elif priority == 'D':
+      style += colorama.Back.BLUE
+    return style
+
+  def _ParseLine(self, line):
+    tokens = line.split(None, 6)
+
+    def consume_token_or_default(default):
+      return tokens.pop(0) if len(tokens) > 0 else default
+
+    def consume_integer_token_or_default(default):
+      if len(tokens) == 0:
+        return default
+
+      try:
+        return int(tokens.pop(0))
+      except ValueError:
+        return default
+
+    date = consume_token_or_default('')
+    invokation_time = consume_token_or_default('')
+    pid = consume_integer_token_or_default(-1)
+    tid = consume_integer_token_or_default(-1)
+    priority = consume_token_or_default('')
+    tag = consume_token_or_default('')
+    original_message = consume_token_or_default('')
+
+    # Example:
+    #   09-19 06:35:51.113  9060  9154 W GCoreFlp: No location...
+    #   09-19 06:01:26.174  9060 10617 I Auth    : [ReflectiveChannelBinder]...
+    # Parsing "GCoreFlp:" vs "Auth    :", we only want tag to contain the word,
+    # and we don't want to keep the colon for the message.
+    if tag and tag[-1] == ':':
+      tag = tag[:-1]
+    elif len(original_message) > 2:
+      original_message = original_message[2:]
+    return self.ParsedLine(
+        date, invokation_time, pid, tid, priority, tag, original_message)
+
+  def _PrintParsedLine(self, parsed_line, dim=False):
+    tid_style = colorama.Style.NORMAL
+    user_match = self._user_defined_highlight and (
+        re.search(self._user_defined_highlight, parsed_line.tag)
+        or re.search(self._user_defined_highlight, parsed_line.message))
+
+    # Make the main thread bright.
+    if not dim and parsed_line.pid == parsed_line.tid:
+      tid_style = colorama.Style.BRIGHT
+    pid_style = self._GetPidStyle(parsed_line.pid, dim)
+    msg_style = pid_style if not user_match else (colorama.Fore.GREEN +
+                                                  colorama.Style.BRIGHT)
+    # We have to pad before adding color as that changes the width of the tag.
+    pid_str = _Colorize('{:5}'.format(parsed_line.pid), pid_style)
+    tid_str = _Colorize('{:5}'.format(parsed_line.tid), tid_style)
+    tag = _Colorize('{:8}'.format(parsed_line.tag),
+                    pid_style + ('' if dim else colorama.Style.BRIGHT))
+    priority = _Colorize(parsed_line.priority,
+                         self._GetPriorityStyle(parsed_line.priority))
+    messages = [parsed_line.message]
+    if self._deobfuscator:
+      messages = self._deobfuscator.TransformLines(messages)
+    for message in messages:
+      message = _Colorize(message, msg_style)
+      sys.stdout.write('{} {} {} {} {} {}: {}\n'.format(
+          parsed_line.date, parsed_line.invokation_time, pid_str, tid_str,
+          priority, tag, message))
+
+  def _TriggerNonceFound(self):
+    # Once the nonce is hit, we have confidence that we know which lines
+    # belong to the current run of the app. Process all of the buffered lines.
+    if self._primary_pid:
+      for args in self._initial_buffered_lines:
+        self._native_stack_symbolizer.AddLine(*args)
+    self._initial_buffered_lines = None
+    self.nonce = None
+
+  def ProcessLine(self, line):
+    if not line or line.startswith('------'):
+      return
+
+    if self.nonce and self.nonce in line:
+      self._TriggerNonceFound()
+
+    nonce_found = self.nonce is None
+
+    log = self._ParseLine(line)
+    if log.pid not in self._seen_pids:
+      self._seen_pids.add(log.pid)
+      if nonce_found:
+        # Update list of owned PIDs each time a new PID is encountered.
+        self._UpdateMyPids()
+
+    # Search for "Start proc $pid:$package_name/" message.
+    if not nonce_found:
+      # Capture logs before the nonce. Start with the most recent "am start".
+      if self._start_pattern.match(log.message):
+        self._initial_buffered_lines = []
+
+      # If we didn't find the PID via "ps", then extract it from log messages.
+      # This will happen if the app crashes too quickly.
+      if not self._found_initial_pid:
+        m = self._pid_pattern.match(log.message)
+        if m:
+          # Find the most recent "Start proc" line before the nonce.
+          # Track only the primary pid in this mode.
+          # The main use-case is to find app logs when no current PIDs exist.
+          # E.g.: When the app crashes on launch.
+          # Convert to int to match the PIDs parsed from "ps" and logcat.
+          pid = int(m.group(1))
+          self._primary_pid = pid
+          self._my_pids.clear()
+          self._my_pids.add(pid)
+
+    owned_pid = log.pid in self._my_pids
+    if owned_pid and not self._verbose and log.tag == 'dalvikvm':
+      if self._DALVIK_IGNORE_PATTERN.match(log.message):
+        return
+
+    if owned_pid or self._verbose or (log.priority == 'F' or  # Java crash dump
+                                      log.tag in self._ALLOWLISTED_TAGS):
+      if nonce_found:
+        self._native_stack_symbolizer.AddLine(log, not owned_pid)
+      else:
+        self._initial_buffered_lines.append((log, not owned_pid))
+
+
+def _RunLogcat(device, package_name, stack_script_context, deobfuscate,
+               verbose):
+  logcat_processor = _LogcatProcessor(
+      device, package_name, stack_script_context, deobfuscate, verbose)
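+  # Write the nonce into the device log so ProcessLine can identify where the
+  # current run's output begins.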
+  device.RunShellCommand(['log', logcat_processor.nonce])
+  for line in device.adb.Logcat(logcat_format='threadtime'):
+    try:
+      logcat_processor.ProcessLine(line)
+    except:  # pylint: disable=bare-except
+      sys.stderr.write('Failed to process line: ' + line + '\n')
+      # Skip stack trace for the common case of the adb server being
+      # restarted.
+      if 'unexpected EOF' in line:
+        sys.exit(1)
+      raise
+
+
+def _GetPackageProcesses(device, package_name):
+  return [
+      p for p in device.ListProcesses(package_name)
+      if p.name == package_name or p.name.startswith(package_name + ':')]
+
+
+def _RunPs(devices, package_name):
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  all_processes = parallel_devices.pMap(
+      lambda d: _GetPackageProcesses(d, package_name)).pGet(None)
+  for processes in _PrintPerDeviceOutput(devices, all_processes):
+    if not processes:
+      print('No processes found.')
+    else:
+      proc_map = collections.defaultdict(list)
+      for p in processes:
+        proc_map[p.name].append(str(p.pid))
+      for name, pids in sorted(proc_map.items()):
+        print(name, ','.join(pids))
+
+
+def _RunShell(devices, package_name, cmd):
+  if cmd:
+    parallel_devices = device_utils.DeviceUtils.parallel(devices)
+    outputs = parallel_devices.RunShellCommand(
+        cmd, run_as=package_name).pGet(None)
+    for output in _PrintPerDeviceOutput(devices, outputs):
+      for line in output:
+        print(line)
+  else:
+    adb_path = adb_wrapper.AdbWrapper.GetAdbPath()
+    cmd = [adb_path, '-s', devices[0].serial, 'shell']
+    # Pre-N devices do not support -t flag.
+    if devices[0].build_version_sdk >= version_codes.NOUGAT:
+      cmd += ['-t', 'run-as', package_name]
+    else:
+      print('Upon entering the shell, run:')
+      print('run-as', package_name)
+      print()
+    os.execv(adb_path, cmd)
+
+
+def _RunCompileDex(devices, package_name, compilation_filter):
+  cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter,
+         package_name]
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  outputs = parallel_devices.RunShellCommand(cmd, timeout=120).pGet(None)
+  for output in _PrintPerDeviceOutput(devices, outputs):
+    for line in output:
+      print(line)
+
+
+def _RunProfile(device, package_name, host_build_directory, pprof_out_path,
+                process_specifier, thread_specifier, extra_args):
+  simpleperf.PrepareDevice(device)
+  device_simpleperf_path = simpleperf.InstallSimpleperf(device, package_name)
+  with tempfile.NamedTemporaryFile() as fh:
+    host_simpleperf_out_path = fh.name
+
+    with simpleperf.RunSimpleperf(device, device_simpleperf_path, package_name,
+                                  process_specifier, thread_specifier,
+                                  extra_args, host_simpleperf_out_path):
+      sys.stdout.write('Profiler is running; press Enter to stop...')
+      sys.stdin.read(1)
+      sys.stdout.write('Post-processing data...')
+      sys.stdout.flush()
+
+    simpleperf.ConvertSimpleperfToPprof(host_simpleperf_out_path,
+                                        host_build_directory, pprof_out_path)
+    print(textwrap.dedent("""
+        Profile data written to %(s)s.
+
+        To view profile as a call graph in browser:
+          pprof -web %(s)s
+
+        To print the hottest methods:
+          pprof -top %(s)s
+
+        pprof has many useful customization options; `pprof --help` for details.
+        """ % {'s': pprof_out_path}))
+
+
+class _StackScriptContext(object):
+  """Maintains temporary files needed by stack.py."""
+
+  def __init__(self,
+               output_directory,
+               apk_path,
+               bundle_generation_info,
+               quiet=False):
+    self._output_directory = output_directory
+    self._apk_path = apk_path
+    self._bundle_generation_info = bundle_generation_info
+    self._staging_dir = None
+    self._quiet = quiet
+
+  def _CreateStaging(self):
+    # In many cases, stack decoding requires APKs to map trace lines to native
+    # libraries. Create a temporary directory, and either unpack a bundle's
+    # APKS into it, or simply symlink the standalone APK into it. This
+    # provides an unambiguous set of APK files for the stack decoding process
+    # to inspect.
+    logging.debug('Creating stack staging directory')
+    self._staging_dir = tempfile.mkdtemp()
+    bundle_generation_info = self._bundle_generation_info
+
+    if bundle_generation_info:
+      # TODO(wnwen): Use apk_helper instead.
+      _GenerateBundleApks(bundle_generation_info)
+      logging.debug('Extracting .apks file')
+      with zipfile.ZipFile(bundle_generation_info.bundle_apks_path, 'r') as z:
+        files_to_extract = [
+            f for f in z.namelist() if f.endswith('-master.apk')
+        ]
+        z.extractall(self._staging_dir, files_to_extract)
+    elif self._apk_path:
+      # Otherwise an incremental APK and an empty apks directory is correct.
+      output = os.path.join(self._staging_dir, os.path.basename(self._apk_path))
+      os.symlink(self._apk_path, output)
+
+  def Close(self):
+    if self._staging_dir:
+      logging.debug('Clearing stack staging directory')
+      shutil.rmtree(self._staging_dir)
+      self._staging_dir = None
+
+  def Popen(self, input_file=None, **kwargs):
+    if self._staging_dir is None:
+      self._CreateStaging()
+    stack_script = os.path.join(
+        constants.host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+        'stack.py')
+    cmd = [
+        stack_script, '--output-directory', self._output_directory,
+        '--apks-directory', self._staging_dir
+    ]
+    if self._quiet:
+      cmd.append('--quiet')
+    if input_file:
+      cmd.append(input_file)
+    logging.info('Running stack.py')
+    return subprocess.Popen(cmd, **kwargs)
+
+
+def _GenerateAvailableDevicesMessage(devices):
+  devices_obj = device_utils.DeviceUtils.parallel(devices)
+  descriptions = devices_obj.pMap(lambda d: d.build_description).pGet(None)
+  msg = 'Available devices:\n'
+  for d, desc in zip(devices, descriptions):
+    msg += '  %s (%s)\n' % (d, desc)
+  return msg
+
+
+# TODO(agrieve): add "--all" in the MultipleDevicesError message and use it here.
+def _GenerateMissingAllFlagMessage(devices):
+  return ('More than one device available. Use --all to select all devices, ' +
+          'or use --device to select a device by serial.\n\n' +
+          _GenerateAvailableDevicesMessage(devices))
+
+
+def _DisplayArgs(devices, command_line_flags_file):
+  def flags_helper(d):
+    changer = flag_changer.FlagChanger(d, command_line_flags_file)
+    return changer.GetCurrentFlags()
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  outputs = parallel_devices.pMap(flags_helper).pGet(None)
+  print('Existing flags per-device (via /data/local/tmp/{}):'.format(
+      command_line_flags_file))
+  for flags in _PrintPerDeviceOutput(devices, outputs, single_line=True):
+    quoted_flags = ' '.join(pipes.quote(f) for f in flags)
+    print(quoted_flags or 'No flags set.')
+
+
+def _DeviceCachePath(device, output_directory):
+  file_name = 'device_cache_%s.json' % device.serial
+  return os.path.join(output_directory, file_name)
+
+
+def _LoadDeviceCaches(devices, output_directory):
+  if not output_directory:
+    return
+  for d in devices:
+    cache_path = _DeviceCachePath(d, output_directory)
+    if os.path.exists(cache_path):
+      logging.debug('Using device cache: %s', cache_path)
+      with open(cache_path) as f:
+        d.LoadCacheData(f.read())
+      # Delete the cached file so that any exceptions cause it to be cleared.
+      os.unlink(cache_path)
+    else:
+      logging.debug('No cache present for device: %s', d)
+
+
+def _SaveDeviceCaches(devices, output_directory):
+  if not output_directory:
+    return
+  for d in devices:
+    cache_path = _DeviceCachePath(d, output_directory)
+    with open(cache_path, 'w') as f:
+      f.write(d.DumpCacheData())
+      logging.info('Wrote device cache: %s', cache_path)
+
+
+class _Command(object):
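+  # Subclasses override these class attributes to declare their requirements.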
+  name = None
+  description = None
+  long_description = None
+  needs_package_name = False
+  needs_output_directory = False
+  needs_apk_helper = False
+  supports_incremental = False
+  accepts_command_line_flags = False
+  accepts_args = False
+  need_device_args = True
+  all_devices_by_default = False
+  calls_exec = False
+  supports_multiple_devices = True
+
+  def __init__(self, from_wrapper_script, is_bundle):
+    self._parser = None
+    self._from_wrapper_script = from_wrapper_script
+    self.args = None
+    self.apk_helper = None
+    self.additional_apk_helpers = None
+    self.install_dict = None
+    self.devices = None
+    self.is_bundle = is_bundle
+    self.bundle_generation_info = None
+    # Only support incremental install from APK wrapper scripts.
+    if is_bundle or not from_wrapper_script:
+      self.supports_incremental = False
+
+  def RegisterBundleGenerationInfo(self, bundle_generation_info):
+    self.bundle_generation_info = bundle_generation_info
+
+  def _RegisterExtraArgs(self, subp):
+    pass
+
+  def RegisterArgs(self, parser):
+    subp = parser.add_parser(
+        self.name, help=self.description,
+        description=self.long_description or self.description,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    self._parser = subp
+    subp.set_defaults(command=self)
+    if self.need_device_args:
+      subp.add_argument('--all',
+                        action='store_true',
+                        default=self.all_devices_by_default,
+                        help='Operate on all connected devices.',)
+      subp.add_argument('-d',
+                        '--device',
+                        action='append',
+                        default=[],
+                        dest='devices',
+                        help='Target device for script to work on. Enter '
+                            'multiple times for multiple devices.')
+    subp.add_argument('-v',
+                      '--verbose',
+                      action='count',
+                      default=0,
+                      dest='verbose_count',
+                      help='Verbose level (multiple times for more)')
+    group = subp.add_argument_group('%s arguments' % self.name)
+
+    if self.needs_package_name:
+      # Three cases to consider here, since later code assumes
+      #  self.args.package_name always exists, even if None:
+      #
+      # - Called from a bundle wrapper script, the package_name is already
+      #   set through parser.set_defaults(), so don't call add_argument()
+      #   to avoid overriding its value.
+      #
+      # - Called from an apk wrapper script. The --package-name argument
+      #   should not appear, but self.args.package_name will be gleaned from
+      #   the --apk-path file later.
+      #
+      # - Called directly, then --package-name is required on the command-line.
+      #
+      if not self.is_bundle:
+        group.add_argument(
+            '--package-name',
+            help=argparse.SUPPRESS if self._from_wrapper_script else (
+                "App's package name."))
+
+    if self.needs_apk_helper or self.needs_package_name:
+      # Adding this argument to the subparser would override the set_defaults()
+      # value set on the parent parser (even if it is None).
+      if not self._from_wrapper_script and not self.is_bundle:
+        group.add_argument(
+            '--apk-path', required=self.needs_apk_helper, help='Path to .apk')
+
+    if self.supports_incremental:
+      group.add_argument('--incremental',
+                          action='store_true',
+                          default=False,
+                          help='Always install an incremental apk.')
+      group.add_argument('--non-incremental',
+                          action='store_true',
+                          default=False,
+                          help='Always install a non-incremental apk.')
+
+    # accepts_command_line_flags and accepts_args are mutually exclusive.
+    # argparse will throw if they are both set.
+    if self.accepts_command_line_flags:
+      group.add_argument(
+          '--args', help='Command-line flags. Use = to assign args.')
+
+    if self.accepts_args:
+      group.add_argument(
+          '--args', help='Extra arguments. Use = to assign args.')
+
+    if not self._from_wrapper_script and self.accepts_command_line_flags:
+      # Provided by wrapper scripts.
+      group.add_argument(
+          '--command-line-flags-file',
+          help='Name of the command-line flags file')
+
+    self._RegisterExtraArgs(group)
+
+  def _CreateApkHelpers(self, args, incremental_apk_path, install_dict):
+    """Returns true iff self.apk_helper was created and assigned."""
+    if self.apk_helper is None:
+      if args.apk_path:
+        self.apk_helper = apk_helper.ToHelper(args.apk_path)
+      elif incremental_apk_path:
+        self.install_dict = install_dict
+        self.apk_helper = apk_helper.ToHelper(incremental_apk_path)
+      elif self.is_bundle:
+        _GenerateBundleApks(self.bundle_generation_info)
+        self.apk_helper = apk_helper.ToHelper(
+            self.bundle_generation_info.bundle_apks_path)
+    if args.additional_apk_paths and self.additional_apk_helpers is None:
+      self.additional_apk_helpers = [
+          apk_helper.ToHelper(apk_path)
+          for apk_path in args.additional_apk_paths
+      ]
+    return self.apk_helper is not None
+
+  def ProcessArgs(self, args):
+    self.args = args
+    # Ensure these keys always exist. They are set by wrapper scripts, but not
+    # always added when not using wrapper scripts.
+    args.__dict__.setdefault('apk_path', None)
+    args.__dict__.setdefault('incremental_json', None)
+
+    incremental_apk_path = None
+    install_dict = None
+    if args.incremental_json and not (self.supports_incremental and
+                                      args.non_incremental):
+      with open(args.incremental_json) as f:
+        install_dict = json.load(f)
+        incremental_apk_path = os.path.join(args.output_directory,
+                                            install_dict['apk_path'])
+        if not os.path.exists(incremental_apk_path):
+          incremental_apk_path = None
+
+    if self.supports_incremental:
+      if args.incremental and args.non_incremental:
+        self._parser.error('Must use only one of --incremental and '
+                           '--non-incremental')
+      elif args.non_incremental:
+        if not args.apk_path:
+          self._parser.error('Apk has not been built.')
+      elif args.incremental:
+        if not incremental_apk_path:
+          self._parser.error('Incremental apk has not been built.')
+        args.apk_path = None
+
+      if args.apk_path and incremental_apk_path:
+        self._parser.error('Both incremental and non-incremental apks exist. '
+                           'Select using --incremental or --non-incremental')
+
+    # Gate apk_helper creation behind _CreateApkHelpers: for bundles, unpacking
+    # the .apks archive from the .aab file takes a while, so skip it for simple
+    # commands that don't need apk_helper.
+    if self.needs_apk_helper:
+      if not self._CreateApkHelpers(args, incremental_apk_path, install_dict):
+        self._parser.error('App is not built.')
+
+    if self.needs_package_name and not args.package_name:
+      if self._CreateApkHelpers(args, incremental_apk_path, install_dict):
+        args.package_name = self.apk_helper.GetPackageName()
+      elif self._from_wrapper_script:
+        self._parser.error('App is not built.')
+      else:
+        self._parser.error('One of --package-name or --apk-path is required.')
+
+    self.devices = []
+    if self.need_device_args:
+      abis = None
+      if self._CreateApkHelpers(args, incremental_apk_path, install_dict):
+        abis = self.apk_helper.GetAbis()
+      self.devices = device_utils.DeviceUtils.HealthyDevices(
+          device_arg=args.devices,
+          enable_device_files_cache=bool(args.output_directory),
+          default_retries=0,
+          abis=abis)
+      # TODO(agrieve): Device cache should not depend on output directory.
+      #     Maybe put into /tmp?
+      _LoadDeviceCaches(self.devices, args.output_directory)
+
+      try:
+        if len(self.devices) > 1:
+          if not self.supports_multiple_devices:
+            self._parser.error(device_errors.MultipleDevicesError(self.devices))
+          if not args.all and not args.devices:
+            self._parser.error(_GenerateMissingAllFlagMessage(self.devices))
+        # Save the cache now if the command will not get a chance to do so
+        # afterwards.
+        if self.calls_exec:
+          _SaveDeviceCaches(self.devices, args.output_directory)
+      except:
+        _SaveDeviceCaches(self.devices, args.output_directory)
+        raise
+
+
+class _DevicesCommand(_Command):
+  name = 'devices'
+  description = 'Describe attached devices.'
+  all_devices_by_default = True
+
+  def Run(self):
+    print(_GenerateAvailableDevicesMessage(self.devices))
+
+
+class _PackageInfoCommand(_Command):
+  name = 'package-info'
+  description = 'Show various attributes of this app.'
+  need_device_args = False
+  needs_package_name = True
+  needs_apk_helper = True
+
+  def Run(self):
+    # Format all (even ints) as strings, to handle cases where APIs return None
+    print('Package name: "%s"' % self.args.package_name)
+    print('versionCode: %s' % self.apk_helper.GetVersionCode())
+    print('versionName: "%s"' % self.apk_helper.GetVersionName())
+    print('minSdkVersion: %s' % self.apk_helper.GetMinSdkVersion())
+    print('targetSdkVersion: %s' % self.apk_helper.GetTargetSdkVersion())
+    print('Supported ABIs: %r' % self.apk_helper.GetAbis())
+
+
+class _InstallCommand(_Command):
+  name = 'install'
+  description = 'Installs the APK or bundle to one or more devices.'
+  needs_apk_helper = True
+  supports_incremental = True
+  default_modules = []
+
+  def _RegisterExtraArgs(self, group):
+    if self.is_bundle:
+      group.add_argument(
+          '-m',
+          '--module',
+          action='append',
+          default=self.default_modules,
+          help='Module to install. Can be specified multiple times.')
+      group.add_argument(
+          '-f',
+          '--fake',
+          action='append',
+          default=[],
+          help='Fake bundle module install. Can be specified multiple times. '
+          'Requires \'-m {0}\' to be given, and \'-f {0}\' is illegal.'.format(
+              BASE_MODULE))
+      # Add even if |self.default_modules| is empty, for consistency.
+      group.add_argument('--no-module',
+                         action='append',
+                         choices=self.default_modules,
+                         default=[],
+                         help='Module to exclude from default install.')
+
+  def Run(self):
+    if self.additional_apk_helpers:
+      for additional_apk_helper in self.additional_apk_helpers:
+        _InstallApk(self.devices, additional_apk_helper, None)
+    if self.is_bundle:
+      modules = list(
+          set(self.args.module) - set(self.args.no_module) -
+          set(self.args.fake))
+      _InstallBundle(self.devices, self.apk_helper, self.args.package_name,
+                     self.args.command_line_flags_file, modules, self.args.fake)
+    else:
+      _InstallApk(self.devices, self.apk_helper, self.install_dict)
+
+
+class _UninstallCommand(_Command):
+  name = 'uninstall'
+  description = 'Removes the APK or bundle from one or more devices.'
+  needs_package_name = True
+
+  def Run(self):
+    _UninstallApk(self.devices, self.install_dict, self.args.package_name)
+
+
+class _SetWebViewProviderCommand(_Command):
+  name = 'set-webview-provider'
+  description = ("Sets the device's WebView provider to this APK's "
+                 "package name.")
+  needs_package_name = True
+  needs_apk_helper = True
+
+  def Run(self):
+    if not _IsWebViewProvider(self.apk_helper):
+      raise Exception('This package does not have a WebViewLibrary meta-data '
+                      'tag. Are you sure it contains a WebView implementation?')
+    _SetWebViewProvider(self.devices, self.args.package_name)
+
+
+class _LaunchCommand(_Command):
+  name = 'launch'
+  description = ('Sends a launch intent for the APK or bundle after first '
+                 'writing the command-line flags file.')
+  needs_package_name = True
+  accepts_command_line_flags = True
+  all_devices_by_default = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument('-w', '--wait-for-java-debugger', action='store_true',
+                       help='Pause execution until debugger attaches. Applies '
+                            'only to the main process. To have renderers wait, '
+                            'use --args="--renderer-wait-for-java-debugger"')
+    group.add_argument('--debug-process-name',
+                       help='Name of the process to debug. '
+                            'E.g. "privileged_process0", or "foo.bar:baz"')
+    group.add_argument('--nokill', action='store_true',
+                       help='Do not set the debug-app, nor set command-line '
+                            'flags. Useful to load a URL without having the '
+                            'app restart.')
+    group.add_argument('url', nargs='?', help='A URL to launch with.')
+
+  def Run(self):
+    if self.args.url and self.is_bundle:
+      # TODO(digit): Support this, maybe by using 'dumpsys' as described
+      # in the _LaunchUrl() comment.
+      raise Exception('Launching with URL not supported for bundles yet!')
+    _LaunchUrl(self.devices, self.args.package_name, argv=self.args.args,
+               command_line_flags_file=self.args.command_line_flags_file,
+               url=self.args.url, apk=self.apk_helper,
+               wait_for_java_debugger=self.args.wait_for_java_debugger,
+               debug_process_name=self.args.debug_process_name,
+               nokill=self.args.nokill)
+
+
+class _StopCommand(_Command):
+  name = 'stop'
+  description = 'Force-stops the app.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    device_utils.DeviceUtils.parallel(self.devices).ForceStop(
+        self.args.package_name)
+
+
+class _ClearDataCommand(_Command):
+  name = 'clear-data'
+  description = 'Clears all app data.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
+        self.args.package_name)
+
+
+class _ArgvCommand(_Command):
+  name = 'argv'
+  description = 'Display and optionally update command-line flags file.'
+  needs_package_name = True
+  accepts_command_line_flags = True
+  all_devices_by_default = True
+
+  def Run(self):
+    _ChangeFlags(self.devices, self.args.args,
+                 self.args.command_line_flags_file)
+
+
+class _GdbCommand(_Command):
+  name = 'gdb'
+  description = 'Runs //build/android/adb_gdb with apk-specific args.'
+  long_description = description + """
+
+To attach to a process other than the APK's main process, use --pid=1234.
+To list all PIDs, use the "ps" command.
+
+If no apk process is currently running, sends a launch intent.
+"""
+  needs_package_name = True
+  needs_output_directory = True
+  calls_exec = True
+  supports_multiple_devices = False
+
+  def Run(self):
+    _RunGdb(self.devices[0], self.args.package_name,
+            self.args.debug_process_name, self.args.pid,
+            self.args.output_directory, self.args.target_cpu, self.args.port,
+            self.args.ide, bool(self.args.verbose_count))
+
+  def _RegisterExtraArgs(self, group):
+    pid_group = group.add_mutually_exclusive_group()
+    pid_group.add_argument('--debug-process-name',
+                           help='Name of the process to attach to. '
+                                'E.g. "privileged_process0", or "foo.bar:baz"')
+    pid_group.add_argument('--pid',
+                           help='The process ID to attach to. Defaults to '
+                                'the main process for the package.')
+    group.add_argument('--ide', action='store_true',
+                       help='Rather than enter a gdb prompt, set up the '
+                            'gdb connection and wait for an IDE to '
+                            'connect.')
+    # Same default port that ndk-gdb.py uses.
+    group.add_argument('--port', type=int, default=5039,
+                       help='Use the given port for the GDB connection')
+
+
+class _LogcatCommand(_Command):
+  name = 'logcat'
+  description = 'Runs "adb logcat" with filters relevant the current APK.'
+  long_description = description + """
+
+"Relevant filters" means:
+  * Log messages from processes belonging to the apk,
+  * Plus log messages from log tags: ActivityManager|DEBUG,
+  * Plus fatal logs from any process,
+  * Minus spammy dalvikvm logs (for pre-L devices).
+
+Colors:
+  * Primary process is white
+  * Other processes (gpu, renderer) are yellow
+  * Non-apk processes are grey
+  * UI thread has a bolded Thread-ID
+
+Java stack traces are detected and deobfuscated (for release builds).
+
+To disable filtering (but keep coloring), use --verbose.
+"""
+  needs_package_name = True
+  supports_multiple_devices = False
+
+  def Run(self):
+    deobfuscate = None
+    if self.args.proguard_mapping_path and not self.args.no_deobfuscate:
+      deobfuscate = deobfuscator.Deobfuscator(self.args.proguard_mapping_path)
+
+    stack_script_context = _StackScriptContext(
+        self.args.output_directory,
+        self.args.apk_path,
+        self.bundle_generation_info,
+        quiet=True)
+    try:
+      _RunLogcat(self.devices[0], self.args.package_name, stack_script_context,
+                 deobfuscate, bool(self.args.verbose_count))
+    except KeyboardInterrupt:
+      pass  # Don't show stack trace upon Ctrl-C
+    finally:
+      stack_script_context.Close()
+      if deobfuscate:
+        deobfuscate.Close()
+
+  def _RegisterExtraArgs(self, group):
+    if self._from_wrapper_script:
+      group.add_argument('--no-deobfuscate', action='store_true',
+          help='Disables ProGuard deobfuscation of logcat.')
+    else:
+      group.set_defaults(no_deobfuscate=False)
+      group.add_argument('--proguard-mapping-path',
+          help='Path to ProGuard map (enables deobfuscation)')
+
+
+class _PsCommand(_Command):
+  name = 'ps'
+  description = 'Show PIDs of any APK processes currently running.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    _RunPs(self.devices, self.args.package_name)
+
+
+class _DiskUsageCommand(_Command):
+  name = 'disk-usage'
+  description = 'Show how much device storage is being consumed by the app.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    _RunDiskUsage(self.devices, self.args.package_name)
+
+
+class _MemUsageCommand(_Command):
+  name = 'mem-usage'
+  description = 'Show memory usage of currently running APK processes.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument('--query-app', action='store_true',
+        help='Do not add --local to "dumpsys meminfo". This will output '
+             'additional metrics (e.g. Context count), but also cause memory '
+             'to be used in order to gather the metrics.')
+
+  def Run(self):
+    _RunMemUsage(self.devices, self.args.package_name,
+                 query_app=self.args.query_app)
+
+
+class _ShellCommand(_Command):
+  name = 'shell'
+  description = ('Same as "adb shell <command>", but runs as the apk\'s uid '
+                 '(via run-as). Useful for inspecting the app\'s data '
+                 'directory.')
+  needs_package_name = True
+
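+  # With no command given, an interactive shell is exec'd, which only makes
+  # sense for a single device.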
+  @property
+  def calls_exec(self):
+    return not self.args.cmd
+
+  @property
+  def supports_multiple_devices(self):
+    return not self.args.cmd
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        'cmd', nargs=argparse.REMAINDER, help='Command to run.')
+
+  def Run(self):
+    _RunShell(self.devices, self.args.package_name, self.args.cmd)
+
+
+class _CompileDexCommand(_Command):
+  name = 'compile-dex'
+  description = ('Applicable only for Android N+. Forces .odex files to be '
+                 'compiled with the given compilation filter. To see existing '
+                 'filter, use "disk-usage" command.')
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        'compilation_filter',
+        choices=['verify', 'quicken', 'space-profile', 'space',
+                 'speed-profile', 'speed'],
+        help='For WebView/Monochrome, use "speed". For other apks, use '
+             '"speed-profile".')
+
+  def Run(self):
+    _RunCompileDex(self.devices, self.args.package_name,
+                   self.args.compilation_filter)
+
+
+class _PrintCertsCommand(_Command):
+  name = 'print-certs'
+  description = 'Print info about certificates used to sign this APK.'
+  need_device_args = False
+  needs_apk_helper = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--full-cert',
+        action='store_true',
+        help=("Print the certificate's full signature, Base64-encoded. "
+              "Useful when configuring an Android image's "
+              "config_webview_packages.xml."))
+
+  def Run(self):
+    keytool = os.path.join(_JAVA_HOME, 'bin', 'keytool')
+    if self.is_bundle:
+      # Bundles are not signed until converted to .apks, though the wrapper
+      # scripts record which key will be used to sign them.
+      with tempfile.NamedTemporaryFile() as f:
+        logging.warning('Bundles are not signed until turned into .apk files.')
+        logging.warning('Showing signing info based on associated keystore.')
+        cmd = [
+            keytool, '-exportcert', '-keystore',
+            self.bundle_generation_info.keystore_path, '-storepass',
+            self.bundle_generation_info.keystore_password, '-alias',
+            self.bundle_generation_info.keystore_alias, '-file', f.name
+        ]
+        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+        cmd = [keytool, '-printcert', '-file', f.name]
+        logging.warning('Running: %s', ' '.join(cmd))
+        subprocess.check_call(cmd)
+        if self.args.full_cert:
+          # Redirect stderr to hide a keytool warning about using non-standard
+          # keystore format.
+          full_output = subprocess.check_output(
+              cmd + ['-rfc'], stderr=subprocess.STDOUT)
+    else:
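+      # For already-built APKs, apksigner can print the signing certificates
+      # directly.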
+      cmd = [
+          build_tools.GetPath('apksigner'), 'verify', '--print-certs',
+          '--verbose', self.apk_helper.path
+      ]
+      logging.warning('Running: %s', ' '.join(cmd))
+      env = os.environ.copy()
+      env['PATH'] = os.path.pathsep.join(
+          [os.path.join(_JAVA_HOME, 'bin'),
+           env.get('PATH')])
+      stdout = subprocess.check_output(cmd, env=env)
+      print(stdout)
+      if self.args.full_cert:
+        if 'v1 scheme (JAR signing): true' not in stdout:
+          raise Exception(
+              'Cannot print full certificate because apk is not V1 signed.')
+
+        cmd = [keytool, '-printcert', '-jarfile', self.apk_helper.path, '-rfc']
+        # Redirect stderr to hide a keytool warning about using non-standard
+        # keystore format.
+        full_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+
+    if self.args.full_cert:
+      m = re.search(
+          r'-+BEGIN CERTIFICATE-+([\r\n0-9A-Za-z+/=]+)-+END CERTIFICATE-+',
+          full_output, re.MULTILINE)
+      if not m:
+        raise Exception('Unable to parse certificate:\n{}'.format(full_output))
+      signature = re.sub(r'[\r\n]+', '', m.group(1))
+      print()
+      print('Full Signature:')
+      print(signature)
+
+
+class _ProfileCommand(_Command):
+  name = 'profile'
+  description = ('Run the simpleperf sampling CPU profiler on the currently-'
+                 'running APK. If --args is used, the extra arguments will be '
+                 'passed on to simpleperf; otherwise, the following default '
+                 'arguments are used: -g -f 1000 -o /data/local/tmp/perf.data')
+  needs_package_name = True
+  needs_output_directory = True
+  supports_multiple_devices = False
+  accepts_args = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--profile-process', default='browser',
+        help=('Which process to profile. This may be a process name or pid '
+              'such as you would get from running `%s ps`; or '
+              'it can be one of (browser, renderer, gpu).' % sys.argv[0]))
+    group.add_argument(
+        '--profile-thread', default=None,
+        help=('(Optional) Profile only a single thread. This may be either a '
+              'thread ID such as you would get by running `adb shell ps -t` '
+              '(pre-Oreo) or `adb shell ps -e -T` (Oreo and later); or it may '
+              'be one of (io, compositor, main, render), in which case '
+              '--profile-process is also required. (Note that "render" thread '
+              'refers to a thread in the browser process that manages a '
+              'renderer; to profile the main thread of the renderer process, '
+              'use --profile-thread=main).'))
+    group.add_argument('--profile-output', default='profile.pb',
+                       help='Output file for profiling data')
+
+  def Run(self):
+    extra_args = shlex.split(self.args.args or '')
+    _RunProfile(self.devices[0], self.args.package_name,
+                self.args.output_directory, self.args.profile_output,
+                self.args.profile_process, self.args.profile_thread,
+                extra_args)
+
+
+class _RunCommand(_InstallCommand, _LaunchCommand, _LogcatCommand):
+  name = 'run'
+  description = 'Install, launch, and show logcat (when targeting one device).'
+  all_devices_by_default = False
+  supports_multiple_devices = True
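+  # All three parents derive from _Command, so their _RegisterExtraArgs() and
+  # Run() implementations are invoked explicitly rather than via super().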
+
+  def _RegisterExtraArgs(self, group):
+    _InstallCommand._RegisterExtraArgs(self, group)
+    _LaunchCommand._RegisterExtraArgs(self, group)
+    _LogcatCommand._RegisterExtraArgs(self, group)
+    group.add_argument('--no-logcat', action='store_true',
+                       help='Install and launch, but do not enter logcat.')
+
+  def Run(self):
+    logging.warning('Installing...')
+    _InstallCommand.Run(self)
+    logging.warning('Sending launch intent...')
+    _LaunchCommand.Run(self)
+    if len(self.devices) == 1 and not self.args.no_logcat:
+      logging.warning('Entering logcat...')
+      _LogcatCommand.Run(self)
+
+
+class _BuildBundleApks(_Command):
+  name = 'build-bundle-apks'
+  description = ('Build the .apks archive from an Android app bundle, and '
+                 'optionally copy it to a specific destination.')
+  need_device_args = False
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--output-apks', required=True, help='Destination path for .apks file.')
+    group.add_argument(
+        '--minimal',
+        action='store_true',
+        help='Build .apks archive that targets the bundle\'s minSdkVersion and '
+        'contains only English splits. It still contains optional splits.')
+    group.add_argument(
+        '--sdk-version', help='The sdkVersion to build the .apks for.')
+    group.add_argument(
+        '--build-mode',
+        choices=app_bundle_utils.BUILD_APKS_MODES,
+        help='Specify which type of APKs archive to build. "default" '
+        'generates regular splits, "universal" generates an archive with a '
+        'single universal APK, "system" generates an archive with a system '
+        'image APK, while "system_compressed" generates a compressed system '
+        'APK, with an additional stub APK for the system image.')
+    group.add_argument(
+        '--optimize-for',
+        choices=app_bundle_utils.OPTIMIZE_FOR_OPTIONS,
+        help='Override split configuration.')
+
+  def Run(self):
+    _GenerateBundleApks(
+        self.bundle_generation_info,
+        output_path=self.args.output_apks,
+        minimal=self.args.minimal,
+        minimal_sdk_version=self.args.sdk_version,
+        mode=self.args.build_mode,
+        optimize_for=self.args.optimize_for)
+
+
+class _ManifestCommand(_Command):
+  name = 'dump-manifest'
+  description = 'Dump the Android manifest from this bundle, as XML, to stdout.'
+  need_device_args = False
+
+  def Run(self):
+    bundletool.RunBundleTool([
+        'dump', 'manifest', '--bundle', self.bundle_generation_info.bundle_path
+    ])
+
+
+class _StackCommand(_Command):
+  name = 'stack'
+  description = 'Decodes an Android stack.'
+  need_device_args = False
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        'file',
+        nargs='?',
+        help='File to decode. If not specified, stdin is processed.')
+
+  def Run(self):
+    context = _StackScriptContext(self.args.output_directory,
+                                  self.args.apk_path,
+                                  self.bundle_generation_info)
+    try:
+      proc = context.Popen(input_file=self.args.file)
+      if proc.wait():
+        raise Exception('stack script returned {}'.format(proc.returncode))
+    finally:
+      context.Close()
+
+
+# Shared commands for regular APKs and app bundles.
+_COMMANDS = [
+    _DevicesCommand,
+    _PackageInfoCommand,
+    _InstallCommand,
+    _UninstallCommand,
+    _SetWebViewProviderCommand,
+    _LaunchCommand,
+    _StopCommand,
+    _ClearDataCommand,
+    _ArgvCommand,
+    _GdbCommand,
+    _LogcatCommand,
+    _PsCommand,
+    _DiskUsageCommand,
+    _MemUsageCommand,
+    _ShellCommand,
+    _CompileDexCommand,
+    _PrintCertsCommand,
+    _ProfileCommand,
+    _RunCommand,
+    _StackCommand,
+]
+
+# Commands specific to app bundles.
+_BUNDLE_COMMANDS = [
+    _BuildBundleApks,
+    _ManifestCommand,
+]
+
+
+def _ParseArgs(parser, from_wrapper_script, is_bundle):
+  subparsers = parser.add_subparsers()
+  command_list = _COMMANDS + (_BUNDLE_COMMANDS if is_bundle else [])
+  commands = [clazz(from_wrapper_script, is_bundle) for clazz in command_list]
+
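+  # Commands that need the output directory are usable only via wrapper
+  # scripts, which are the callers that provide it.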
+  for command in commands:
+    if from_wrapper_script or not command.needs_output_directory:
+      command.RegisterArgs(subparsers)
+
+  # Show extended help when no command is passed.
+  argv = sys.argv[1:]
+  if not argv:
+    argv = ['--help']
+
+  return parser.parse_args(argv)
+
+
+def _RunInternal(parser,
+                 output_directory=None,
+                 additional_apk_paths=None,
+                 bundle_generation_info=None):
+  colorama.init()
+  parser.set_defaults(
+      additional_apk_paths=additional_apk_paths,
+      output_directory=output_directory)
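+  # Only the generated wrapper scripts pass an output directory here.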
+  from_wrapper_script = bool(output_directory)
+  args = _ParseArgs(parser, from_wrapper_script, bool(bundle_generation_info))
+  run_tests_helper.SetLogLevel(args.verbose_count)
+  if bundle_generation_info:
+    args.command.RegisterBundleGenerationInfo(bundle_generation_info)
+  if args.additional_apk_paths:
+    for path in additional_apk_paths:
+      if not path or not os.path.exists(path):
+        raise Exception('Invalid additional APK path "{}"'.format(path))
+  args.command.ProcessArgs(args)
+  args.command.Run()
+  # Incremental install depends on the cache being cleared when uninstalling.
+  if args.command.name != 'uninstall':
+    _SaveDeviceCaches(args.command.devices, output_directory)
+
+
+def Run(output_directory, apk_path, additional_apk_paths, incremental_json,
+        command_line_flags_file, target_cpu, proguard_mapping_path):
+  """Entry point for generated wrapper scripts."""
+  constants.SetOutputDirectory(output_directory)
+  devil_chromium.Initialize(output_directory=output_directory)
+  parser = argparse.ArgumentParser()
+  exists_or_none = lambda p: p if p and os.path.exists(p) else None
+
+  parser.set_defaults(
+      command_line_flags_file=command_line_flags_file,
+      target_cpu=target_cpu,
+      apk_path=exists_or_none(apk_path),
+      incremental_json=exists_or_none(incremental_json),
+      proguard_mapping_path=proguard_mapping_path)
+  _RunInternal(
+      parser,
+      output_directory=output_directory,
+      additional_apk_paths=additional_apk_paths)
+
+
+def RunForBundle(output_directory, bundle_path, bundle_apks_path,
+                 additional_apk_paths, aapt2_path, keystore_path,
+                 keystore_password, keystore_alias, package_name,
+                 command_line_flags_file, proguard_mapping_path, target_cpu,
+                 system_image_locales, default_modules):
+  """Entry point for generated app bundle wrapper scripts.
+
+  Args:
+    output_directory: Chromium output directory path.
+    bundle_path: Input bundle path.
+    bundle_apks_path: Output bundle .apks archive path.
+    additional_apk_paths: Additional APKs to install prior to bundle install.
+    aapt2_path: Aapt2 tool path.
+    keystore_path: Keystore file path.
+    keystore_password: Keystore password.
+    keystore_alias: Signing key name alias in keystore file.
+    package_name: Application's package name.
+    command_line_flags_file: Optional. Name of an on-device file that will be
+      used to store command-line flags for this bundle.
+    proguard_mapping_path: Input path to the Proguard mapping file, used to
+      deobfuscate Java stack traces.
+    target_cpu: Chromium target CPU name, used by the 'gdb' command.
+    system_image_locales: List of Chromium locales that should be included in
+      system image APKs.
+    default_modules: List of modules that are installed in addition to those
+      given by the '-m' switch.
+  """
+  constants.SetOutputDirectory(output_directory)
+  devil_chromium.Initialize(output_directory=output_directory)
+  bundle_generation_info = BundleGenerationInfo(
+      bundle_path=bundle_path,
+      bundle_apks_path=bundle_apks_path,
+      aapt2_path=aapt2_path,
+      keystore_path=keystore_path,
+      keystore_password=keystore_password,
+      keystore_alias=keystore_alias,
+      system_image_locales=system_image_locales)
+  _InstallCommand.default_modules = default_modules
+
+  parser = argparse.ArgumentParser()
+  parser.set_defaults(
+      package_name=package_name,
+      command_line_flags_file=command_line_flags_file,
+      proguard_mapping_path=proguard_mapping_path,
+      target_cpu=target_cpu)
+  _RunInternal(
+      parser,
+      output_directory=output_directory,
+      additional_apk_paths=additional_apk_paths,
+      bundle_generation_info=bundle_generation_info)
+
+
+def main():
+  devil_chromium.Initialize()
+  _RunInternal(argparse.ArgumentParser())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/apk_operations.pydeps b/src/build/android/apk_operations.pydeps
new file mode 100644
index 0000000..60b1289
--- /dev/null
+++ b/src/build/android/apk_operations.pydeps
@@ -0,0 +1,110 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/apk_operations.pydeps build/android/apk_operations.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/bundletool.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/catapult/third_party/six/six.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_compat.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../gn_helpers.py
+../print_python_deps.py
+adb_command_line.py
+apk_operations.py
+convert_dex_profile.py
+devil_chromium.py
+gyp/bundletool.py
+gyp/dex.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+gyp/util/resource_utils.py
+gyp/util/zipalign.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/utils/__init__.py
+pylib/utils/app_bundle_utils.py
+pylib/utils/simpleperf.py
+pylib/utils/time_profile.py
diff --git a/src/build/android/apply_shared_preference_file.py b/src/build/android/apply_shared_preference_file.py
new file mode 100755
index 0000000..187bf18
--- /dev/null
+++ b/src/build/android/apply_shared_preference_file.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manually applies a shared preference JSON file.
+
+If needed during automation, use the --shared-prefs-file option in
+test_runner.py instead.
+"""
+
+import argparse
+import sys
+
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+
+from devil.android import device_utils
+from devil.android.sdk import shared_prefs
+from pylib.utils import shared_preference_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Manually apply shared preference JSON files.')
+  parser.add_argument('filepaths', nargs='*',
+                      help='Any number of paths to shared preference JSON '
+                           'files to apply.')
+  args = parser.parse_args()
+
+  all_devices = device_utils.DeviceUtils.HealthyDevices()
+  if not all_devices:
+    raise RuntimeError('No healthy devices attached')
+
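+  # Apply every setting from each JSON file to every healthy device.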
+  for filepath in args.filepaths:
+    all_settings = shared_preference_utils.ExtractSettingsFromJson(filepath)
+    for setting in all_settings:
+      for device in all_devices:
+        shared_pref = shared_prefs.SharedPrefs(
+            device, setting['package'], setting['filename'],
+            use_encrypted_path=setting.get('supports_encrypted_path', False))
+        shared_preference_utils.ApplySharedPreferenceSetting(
+            shared_pref, setting)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/asan_symbolize.py b/src/build/android/asan_symbolize.py
new file mode 100755
index 0000000..6585089
--- /dev/null
+++ b/src/build/android/asan_symbolize.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+# pylint: disable=wrong-import-order
+# Uses symbol.py from third_party/android_platform, not python's.
+with host_paths.SysPath(
+    host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+    position=0):
+  import symbol
+
+
+_RE_ASAN = re.compile(
+    r"""
+    (?P<prefix>.*?)
+    (?P<pos>\#\S*?)          # position of the call in stack.
+                             # escape the char "#" due to the VERBOSE flag.
+    \s+(\S*?)\s+
+    \(                       # match the char "(".
+        (?P<lib>.*?)         # library path.
+        \+0[xX](?P<addr>.*?) # address of the symbol in hex.
+                             # the prefix "0x" is skipped.
+    \)                       # match the char ")".
+    """, re.VERBOSE)
+
+# This named tuple models a parsed Asan log line.
+AsanParsedLine = collections.namedtuple('AsanParsedLine',
+                                        'prefix,library,pos,rel_address')
+
+# This named tuple models an Asan log line. 'raw' is the raw content
+# while 'parsed' is None or an AsanParsedLine instance.
+AsanLogLine = collections.namedtuple('AsanLogLine', 'raw,parsed')
+
+def _ParseAsanLogLine(line):
+  """Parse line into corresponding AsanParsedLine value, if any, or None."""
+  m = re.match(_RE_ASAN, line)
+  if not m:
+    return None
+  return AsanParsedLine(prefix=m.group('prefix'),
+                        library=m.group('lib'),
+                        pos=m.group('pos'),
+                        rel_address='%08x' % int(m.group('addr'), 16))
+
+
+def _FindASanLibraries():
+  asan_lib_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                              'third_party', 'llvm-build',
+                              'Release+Asserts', 'lib')
+  asan_libs = []
+  for src_dir, _, files in os.walk(asan_lib_dir):
+    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+                  for f in files
+                  if f.endswith('.so')]
+  return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
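+  # Use the local build's copy of an ASan runtime library when one matches by
+  # basename; otherwise defer to symbol.TranslateLibPath().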
+  for asan_lib in asan_libs:
+    if os.path.basename(library) == os.path.basename(asan_lib):
+      return '/' + asan_lib
+  # pylint: disable=no-member
+  return symbol.TranslateLibPath(library)
+
+
+def _PrintSymbolized(asan_input, arch):
+  """Print symbolized logcat output for Asan symbols.
+
+  Args:
+    asan_input: list of input lines.
+    arch: Target CPU architecture.
+  """
+  asan_libs = _FindASanLibraries()
+
+  # Maps library -> [ AsanParsedLine... ]
+  libraries = collections.defaultdict(list)
+
+  asan_log_lines = []
+  for line in asan_input:
+    line = line.rstrip()
+    parsed = _ParseAsanLogLine(line)
+    if parsed:
+      libraries[parsed.library].append(parsed)
+    asan_log_lines.append(AsanLogLine(raw=line, parsed=parsed))
+
+  # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] }
+  all_symbols = collections.defaultdict(dict)
+
+  for library, items in libraries.items():
+    libname = _TranslateLibPath(library, asan_libs)
+    lib_relative_addrs = set([i.rel_address for i in items])
+    # pylint: disable=no-member
+    info_dict = symbol.SymbolInformationForSet(libname,
+                                               lib_relative_addrs,
+                                               True,
+                                               cpu_arch=arch)
+    if info_dict:
+      all_symbols[library] = info_dict
+
+  for log_line in asan_log_lines:
+    m = log_line.parsed
+    if (m and m.library in all_symbols and
+        m.rel_address in all_symbols[m.library]):
+      # NOTE: all_symbols[lib][address] is a never-empty list of tuples.
+      # NOTE: The documentation for SymbolInformationForSet() indicates
+      # that usually one wants to display the last list item, not the first.
+      # The code below takes the first; is this the best choice here?
+      s = all_symbols[m.library][m.rel_address][0]
+      print('%s%s %s %s' % (m.prefix, m.pos, s[0], s[1]))
+    else:
+      print(log_line.raw)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-l', '--logcat',
+                    help='File containing adb logcat output with ASan stacks. '
+                         'Use stdin if not specified.')
+  parser.add_option('--output-directory',
+                    help='Path to the root build directory.')
+  parser.add_option('--arch', default='arm',
+                    help='CPU architecture name')
+  options, _ = parser.parse_args()
+
+  if options.output_directory:
+    constants.SetOutputDirectory(options.output_directory)
+  # Do an up-front test that the output directory is known.
+  constants.CheckOutputDirectory()
+
+  if options.logcat:
+    asan_input = open(options.logcat, 'r')
+  else:
+    asan_input = sys.stdin
+
+  _PrintSymbolized(asan_input.readlines(), options.arch)
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/android/bytecode/BUILD.gn b/src/build/android/bytecode/BUILD.gn
new file mode 100644
index 0000000..36b5432
--- /dev/null
+++ b/src/build/android/bytecode/BUILD.gn
@@ -0,0 +1,56 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+java_binary("bytecode_processor") {
+  sources = [
+    "java/org/chromium/bytecode/ByteCodeProcessor.java",
+    "java/org/chromium/bytecode/ClassPathValidator.java",
+    "java/org/chromium/bytecode/TypeUtils.java",
+  ]
+  main_class = "org.chromium.bytecode.ByteCodeProcessor"
+  deps = [
+    "//third_party/android_deps:org_ow2_asm_asm_java",
+    "//third_party/android_deps:org_ow2_asm_asm_util_java",
+  ]
+  wrapper_script_name = "helper/bytecode_processor"
+  enable_bytecode_checks = false
+}
+
+# A bytecode rewriter that replaces all calls to
+# `FragmentActivity Fragment.getActivity()` with
+# `Activity Fragment.getActivity()`.
+java_binary("fragment_activity_replacer") {
+  main_class = "org.chromium.bytecode.FragmentActivityReplacer"
+  deps = [ ":fragment_activity_replacer_java" ]
+  wrapper_script_name = "helper/fragment_activity_replacer"
+}
+
+# A bytecode rewriter that replaces all calls to
+# `FragmentActivity Fragment.getActivity()` with
+# `Activity Fragment.getActivity()`  followed by a cast to FragmentActivity.
+# Prefer :fragment_activity_replacer. This rewriter should only be used for
+# libraries that rely on getActivity() returning a FragmentActivity *and* are
+# not going to be used in an app that contains multiple copies of the AndroidX
+# Fragment library (i.e. WebLayer).
+java_binary("fragment_activity_replacer_single_androidx") {
+  main_class = "org.chromium.bytecode.FragmentActivityReplacer"
+  deps = [ ":fragment_activity_replacer_java" ]
+  wrapper_script_name = "helper/fragment_activity_replacer_single_androidx"
+  wrapper_script_args = [ "--single-androidx" ]
+}
+
+java_library("fragment_activity_replacer_java") {
+  visibility = [ ":*" ]
+  sources = [
+    "java/org/chromium/bytecode/ByteCodeRewriter.java",
+    "java/org/chromium/bytecode/FragmentActivityReplacer.java",
+  ]
+  deps = [
+    "//third_party/android_deps:org_ow2_asm_asm_commons_java",
+    "//third_party/android_deps:org_ow2_asm_asm_java",
+    "//third_party/android_deps:org_ow2_asm_asm_util_java",
+  ]
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
new file mode 100644
index 0000000..b767f4f
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -0,0 +1,167 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * Java application that takes in an input jar, performs a series of bytecode
+ * transformations, and generates an output jar.
+ */
+class ByteCodeProcessor {
+    private static final String CLASS_FILE_SUFFIX = ".class";
+    private static final int BUFFER_SIZE = 16384;
+    private static boolean sVerbose;
+    private static boolean sIsPrebuilt;
+    private static ClassLoader sDirectClassPathClassLoader;
+    private static ClassLoader sFullClassPathClassLoader;
+    private static Set<String> sFullClassPathJarPaths;
+    private static Set<String> sMissingClassesAllowlist;
+    private static Map<String, String> sJarToGnTarget;
+    private static ClassPathValidator sValidator;
+
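+    // Prebuilt jars are validated against the full classpath only; jars built
+    // from source are additionally checked against their direct classpath.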
+    private static Void processEntry(ZipEntry entry, byte[] data) {
+        ClassReader reader = new ClassReader(data);
+        if (sIsPrebuilt) {
+            sValidator.validateFullClassPath(
+                    reader, sFullClassPathClassLoader, sMissingClassesAllowlist);
+        } else {
+            sValidator.validateDirectClassPath(reader, sDirectClassPathClassLoader,
+                    sFullClassPathClassLoader, sFullClassPathJarPaths, sMissingClassesAllowlist,
+                    sVerbose);
+        }
+        return null;
+    }
+
+    private static void process(String gnTarget, String inputJarPath)
+            throws ExecutionException, InterruptedException {
+        ExecutorService executorService =
+                Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+        try (ZipInputStream inputStream = new ZipInputStream(
+                     new BufferedInputStream(new FileInputStream(inputJarPath)))) {
+            while (true) {
+                ZipEntry entry = inputStream.getNextEntry();
+                if (entry == null) {
+                    break;
+                }
+                byte[] data = readAllBytes(inputStream);
+                executorService.submit(() -> processEntry(entry, data));
+            }
+            // Without shutdown(), awaitTermination() would block for the full timeout.
+            executorService.shutdown();
+            executorService.awaitTermination(1, TimeUnit.HOURS);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        if (sValidator.hasErrors()) {
+            sValidator.printAll(gnTarget, sJarToGnTarget);
+            System.exit(1);
+        }
+    }
+
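+    // Drains |inputStream| to a byte array; equivalent to Java 9's
+    // InputStream#readAllBytes().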
+    private static byte[] readAllBytes(InputStream inputStream) throws IOException {
+        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+        int numRead = 0;
+        byte[] data = new byte[BUFFER_SIZE];
+        while ((numRead = inputStream.read(data, 0, data.length)) != -1) {
+            buffer.write(data, 0, numRead);
+        }
+        return buffer.toByteArray();
+    }
+
+    /**
+     * Loads a list of jars and returns a ClassLoader capable of loading all classes found in the
+     * given jars.
+     */
+    static ClassLoader loadJars(Collection<String> paths) {
+        URL[] jarUrls = new URL[paths.size()];
+        int i = 0;
+        for (String path : paths) {
+            try {
+                jarUrls[i++] = new File(path).toURI().toURL();
+            } catch (MalformedURLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return new URLClassLoader(jarUrls);
+    }
+
+    /**
+     * Extracts a length-encoded list of strings from the arguments, and adds them to |out|. Returns
+     * the new "next index" to be processed.
+     */
+    private static int parseListArgument(String[] args, int index, Collection<String> out) {
+        int argLength = Integer.parseInt(args[index++]);
+        out.addAll(Arrays.asList(Arrays.copyOfRange(args, index, index + argLength)));
+        return index + argLength;
+    }
+
+    public static void main(String[] args) throws ClassPathValidator.ClassNotLoadedException,
+                                                  ExecutionException, InterruptedException {
+        // Invoke this script using //build/android/gyp/bytecode_processor.py
+        int currIndex = 0;
+        String gnTarget = args[currIndex++];
+        String inputJarPath = args[currIndex++];
+        sVerbose = args[currIndex++].equals("--verbose");
+        sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
+
+        sMissingClassesAllowlist = new HashSet<>();
+        currIndex = parseListArgument(args, currIndex, sMissingClassesAllowlist);
+
+        ArrayList<String> sdkJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, sdkJarPaths);
+
+        ArrayList<String> directClassPathJarPaths = new ArrayList<>();
+        directClassPathJarPaths.add(inputJarPath);
+        directClassPathJarPaths.addAll(sdkJarPaths);
+        currIndex = parseListArgument(args, currIndex, directClassPathJarPaths);
+        sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
+
+        ArrayList<String> fullClassPathJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, fullClassPathJarPaths);
+        ArrayList<String> gnTargets = new ArrayList<>();
+        parseListArgument(args, currIndex, gnTargets);
+        sJarToGnTarget = new HashMap<>();
+        assert fullClassPathJarPaths.size() == gnTargets.size();
+        for (int i = 0; i < fullClassPathJarPaths.size(); ++i) {
+            sJarToGnTarget.put(fullClassPathJarPaths.get(i), gnTargets.get(i));
+        }
+
+        // Load all jars that are on the classpath for the input jar for analyzing class
+        // hierarchy.
+        sFullClassPathJarPaths = new HashSet<>();
+        sFullClassPathJarPaths.add(inputJarPath);
+        sFullClassPathJarPaths.addAll(sdkJarPaths);
+        sFullClassPathJarPaths.addAll(fullClassPathJarPaths);
+        sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
+        sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
+
+        sValidator = new ClassPathValidator();
+        process(gnTarget, inputJarPath);
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
new file mode 100644
index 0000000..3d0d9cd
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
@@ -0,0 +1,91 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * Base class for scripts that perform bytecode modifications on a jar file.
+ */
+public abstract class ByteCodeRewriter {
+    private static final String CLASS_FILE_SUFFIX = ".class";
+
+    public void rewrite(File inputJar, File outputJar) throws IOException {
+        if (!inputJar.exists()) {
+            throw new FileNotFoundException("Input jar not found: " + inputJar.getPath());
+        }
+        try (InputStream inputStream = new BufferedInputStream(new FileInputStream(inputJar))) {
+            try (OutputStream outputStream = new FileOutputStream(outputJar)) {
+                processZip(inputStream, outputStream);
+            }
+        }
+    }
+
+    /** Returns true if the class at the given path in the archive should be rewritten. */
+    protected abstract boolean shouldRewriteClass(String classPath);
+
+    /**
+     * Returns the ClassVisitor that should be used to modify the bytecode of the class at the
+     * given path in the archive.
+     */
+    protected abstract ClassVisitor getClassVisitorForClass(
+            String classPath, ClassVisitor delegate);
+
+    private void processZip(InputStream inputStream, OutputStream outputStream) {
+        try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) {
+            ZipInputStream zipInputStream = new ZipInputStream(inputStream);
+            ZipEntry entry;
+            while ((entry = zipInputStream.getNextEntry()) != null) {
+                ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+                boolean handled = processClassEntry(entry, zipInputStream, buffer);
+                if (handled) {
+                    ZipEntry newEntry = new ZipEntry(entry.getName());
+                    zipOutputStream.putNextEntry(newEntry);
+                    zipOutputStream.write(buffer.toByteArray(), 0, buffer.size());
+                } else {
+                    zipOutputStream.putNextEntry(entry);
+                    zipInputStream.transferTo(zipOutputStream);
+                }
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private boolean processClassEntry(
+            ZipEntry entry, InputStream inputStream, OutputStream outputStream) {
+        if (!entry.getName().endsWith(CLASS_FILE_SUFFIX) || !shouldRewriteClass(entry.getName())) {
+            return false;
+        }
+        try {
+            ClassReader reader = new ClassReader(inputStream);
+            ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES);
+            ClassVisitor classVisitor = getClassVisitorForClass(entry.getName(), writer);
+            reader.accept(classVisitor, ClassReader.EXPAND_FRAMES);
+
+            writer.visitEnd();
+            byte[] classData = writer.toByteArray();
+            outputStream.write(classData, 0, classData.length);
+            return true;
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
new file mode 100644
index 0000000..9f45df5
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -0,0 +1,233 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.io.PrintStream;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.function.Consumer;
+
+/**
+ * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
+ * attempting to load every referenced class. For each class that cannot be found, it stores a
+ * helpful error message if it knows which jar might provide it, and the program exits with an
+ * error if a class cannot be found on any of the given classpaths.
+ */
+public class ClassPathValidator {
+    // Maximum number of missing class warnings to print.
+    private static final int MAX_MISSING_CLASS_WARNINGS = 10;
+    // Maximum number of missing classes to show per missing jar.
+    private static final int MAX_ERRORS_PER_JAR = 2;
+    // Map of missing .jar -> Missing class -> Classes that failed.
+    // TreeMap so that error messages have sorted list of jars.
+    private final Map<String, Map<String, Set<String>>> mDirectErrors =
+            Collections.synchronizedMap(new TreeMap<>());
+    // For each missing class we only track one source class that referenced it.
+    // Map of missingClass -> srcClass.
+    private final Map<String, String> mMissingClasses =
+            Collections.synchronizedMap(new TreeMap<>());
+
+    static class ClassNotLoadedException extends ClassNotFoundException {
+        private final String mClassName;
+
+        ClassNotLoadedException(String className, Throwable ex) {
+            super("Couldn't load " + className, ex);
+            mClassName = className;
+        }
+
+        public String getClassName() {
+            return mClassName;
+        }
+    }
+
+    private static void validateClass(ClassLoader classLoader, String className)
+            throws ClassNotLoadedException {
+        if (className.startsWith("[")) {
+            // Dealing with an array type which isn't encoded nicely in the constant pool.
+            // For example, [[Lorg/chromium/Class$1;
+            className = className.substring(className.lastIndexOf('[') + 1);
+            if (className.charAt(0) == 'L' && className.endsWith(";")) {
+                className = className.substring(1, className.length() - 1);
+            } else {
+                // Bailing out if we have a non-class array type.
+                // This could be something like [B
+                return;
+            }
+        }
+        if (className.matches(".*\\bR(\\$\\w+)?$")) {
+            // Resources in R.java files are not expected to be valid at this stage in the build.
+            return;
+        }
+        if (className.matches("^libcore\\b.*")) {
+            // libcore exists on devices, but is not included in the Android sdk as it is a private
+            // API.
+            return;
+        }
+        try {
+            classLoader.loadClass(className.replace('/', '.'));
+        } catch (ClassNotFoundException e) {
+            throw new ClassNotLoadedException(className, e);
+        } catch (NoClassDefFoundError e) {
+            // We assume that this is caused by another class that is not going to be able to be
+            // loaded, so we skip this one and let that class fail with ClassNotFoundException.
+        }
+    }
+
+    /**
+     * Given a .class file, see if every class referenced in the main class' constant pool can be
+     * loaded by the given ClassLoader.
+     *
+     * @param classReader .class file interface for reading the constant pool.
+     * @param classLoader classpath you wish to validate.
+     * @param errorConsumer Called for each missing class.
+     */
+    private static void validateClassPath(ClassReader classReader, ClassLoader classLoader,
+            Consumer<ClassNotLoadedException> errorConsumer) {
+        char[] charBuffer = new char[classReader.getMaxStringLength()];
+        // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
+        // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+        for (int i = 1; i < classReader.getItemCount(); i++) {
+            int offset = classReader.getItem(i);
+            // Class entries correspond to 7 in the constant pool
+            // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+            if (offset > 0 && classReader.readByte(offset - 1) == 7) {
+                try {
+                    validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+                } catch (ClassNotLoadedException e) {
+                    errorConsumer.accept(e);
+                }
+            }
+        }
+    }
+
+    public void validateFullClassPath(ClassReader classReader, ClassLoader fullClassLoader,
+            Set<String> missingClassAllowlist) {
+        // Prebuilts only need transitive dependencies checked, not direct dependencies.
+        validateClassPath(classReader, fullClassLoader, (e) -> {
+            if (!missingClassAllowlist.contains(e.getClassName())) {
+                addMissingError(classReader.getClassName(), e.getClassName());
+            }
+        });
+    }
+
+    public void validateDirectClassPath(ClassReader classReader, ClassLoader directClassLoader,
+            ClassLoader fullClassLoader, Collection<String> jarsOnlyInFullClassPath,
+            Set<String> missingClassAllowlist, boolean verbose) {
+        validateClassPath(classReader, directClassLoader, (e) -> {
+            try {
+                validateClass(fullClassLoader, e.getClassName());
+            } catch (ClassNotLoadedException d) {
+                if (!missingClassAllowlist.contains(e.getClassName())) {
+                    addMissingError(classReader.getClassName(), e.getClassName());
+                }
+                return;
+            }
+            if (verbose) {
+                System.err.println("Class \"" + e.getClassName()
+                        + "\" not found in direct dependencies,"
+                        + " but found in indirect dependencies.");
+            }
+            // Iterating through all jars that are in the full classpath but not the direct
+            // classpath to find which one provides the class we are looking for.
+            for (String jarPath : jarsOnlyInFullClassPath) {
+                try {
+                    ClassLoader smallLoader =
+                            ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+                    validateClass(smallLoader, e.getClassName());
+                    addDirectError(jarPath, classReader.getClassName(), e.getClassName());
+                    break;
+                } catch (ClassNotLoadedException f) {
+                }
+            }
+        });
+    }
+
+    private void addMissingError(String srcClass, String missingClass) {
+        mMissingClasses.put(missingClass, srcClass);
+    }
+
+    private void addDirectError(String jarPath, String srcClass, String missingClass) {
+        synchronized (mDirectErrors) {
+            Map<String, Set<String>> failedClassesByMissingClass = mDirectErrors.get(jarPath);
+            if (failedClassesByMissingClass == null) {
+                // TreeMap so that error messages have sorted list of classes.
+                failedClassesByMissingClass = new TreeMap<>();
+                mDirectErrors.put(jarPath, failedClassesByMissingClass);
+            }
+            Set<String> failedClasses = failedClassesByMissingClass.get(missingClass);
+            if (failedClasses == null) {
+                failedClasses = new TreeSet<>();
+                failedClassesByMissingClass.put(missingClass, failedClasses);
+            }
+            failedClasses.add(srcClass);
+        }
+    }
+
+    public boolean hasErrors() {
+        return !mDirectErrors.isEmpty() || !mMissingClasses.isEmpty();
+    }
+
+    private static void printValidationError(
+            PrintStream out, String gnTarget, Map<String, Set<String>> missingClasses) {
+        out.print(" * ");
+        out.println(gnTarget);
+        int i = 0;
+        // The list of missing classes is non-exhaustive because each class that fails to validate
+        // reports only the first missing class.
+        for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+            String missingClass = entry.getKey();
+            Set<String> filesThatNeededIt = entry.getValue();
+            out.print("     * ");
+            if (i == MAX_ERRORS_PER_JAR) {
+                out.print(String.format(
+                        "And %d more...", missingClasses.size() - MAX_ERRORS_PER_JAR));
+                break;
+            }
+            out.print(missingClass.replace('/', '.'));
+            out.print(" (needed by ");
+            out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+            if (filesThatNeededIt.size() > 1) {
+                out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+            }
+            out.println(")");
+            i++;
+        }
+    }
+
+    public void printAll(String gnTarget, Map<String, String> jarToGnTarget) {
+        String streamer = "=============================";
+        System.err.println();
+        System.err.println(streamer + " Dependency Checks Failed " + streamer);
+        System.err.println("Target: " + gnTarget);
+        if (!mMissingClasses.isEmpty()) {
+            int i = 0;
+            for (Map.Entry<String, String> entry : mMissingClasses.entrySet()) {
+                if (++i > MAX_MISSING_CLASS_WARNINGS) {
+                    System.err.println(String.format("... and %d more.",
+                            mMissingClasses.size() - MAX_MISSING_CLASS_WARNINGS));
+                    break;
+                }
+                System.err.println(String.format(
+                        "Class \"%s\" not found on any classpath. Used by class \"%s\"",
+                        entry.getKey(), entry.getValue()));
+            }
+            System.err.println();
+        }
+        if (!mDirectErrors.isEmpty()) {
+            System.err.println("Direct classpath is incomplete. To fix, add deps on:");
+            for (Map.Entry<String, Map<String, Set<String>>> entry : mDirectErrors.entrySet()) {
+                printValidationError(
+                        System.err, jarToGnTarget.get(entry.getKey()), entry.getValue());
+            }
+            System.err.println();
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java b/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
new file mode 100644
index 0000000..a40f39c
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
@@ -0,0 +1,238 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.Type;
+import org.objectweb.asm.commons.MethodRemapper;
+import org.objectweb.asm.commons.Remapper;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Java application that modifies Fragment.getActivity() to return an Activity instead of a
+ * FragmentActivity, and updates any existing getActivity() calls to reference the updated method.
+ *
+ * See crbug.com/1144345 for more context.
+ */
+public class FragmentActivityReplacer extends ByteCodeRewriter {
+    private static final String GET_ACTIVITY_METHOD_NAME = "getActivity";
+    private static final String GET_LIFECYCLE_ACTIVITY_METHOD_NAME = "getLifecycleActivity";
+    private static final String NEW_METHOD_DESCRIPTOR = "()Landroid/app/Activity;";
+    private static final String OLD_METHOD_DESCRIPTOR =
+            "()Landroidx/fragment/app/FragmentActivity;";
+    private static final String REQUIRE_ACTIVITY_METHOD_NAME = "requireActivity";
+    private static final String SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME =
+            "com.google.android.gms.common.api.internal.SupportLifecycleFragmentImpl";
+
+    public static void main(String[] args) throws IOException {
+        // Invoke this script using //build/android/gyp/bytecode_rewriter.py
+        if (!(args.length == 2 || args.length == 3 && args[0].equals("--single-androidx"))) {
+            System.err.println("Expected arguments: [--single-androidx] <input.jar> <output.jar>");
+            System.exit(1);
+        }
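+        // Example invocation (hypothetical jar paths):
+        //   java org.chromium.bytecode.FragmentActivityReplacer --single-androidx in.jar out.jar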
+
+        if (args.length == 2) {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(false);
+            rewriter.rewrite(new File(args[0]), new File(args[1]));
+        } else {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(true);
+            rewriter.rewrite(new File(args[1]), new File(args[2]));
+        }
+    }
+
+    private final boolean mSingleAndroidX;
+
+    public FragmentActivityReplacer(boolean singleAndroidX) {
+        mSingleAndroidX = singleAndroidX;
+    }
+
+    @Override
+    protected boolean shouldRewriteClass(String classPath) {
+        return true;
+    }
+
+    @Override
+    protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor delegate) {
+        ClassVisitor invocationVisitor = new InvocationReplacer(delegate, mSingleAndroidX);
+        switch (classPath) {
+            case "androidx/fragment/app/Fragment.class":
+                return new FragmentClassVisitor(invocationVisitor);
+            case "com/google/android/gms/common/api/internal/SupportLifecycleFragmentImpl.class":
+                return new SupportLifecycleFragmentImplClassVisitor(invocationVisitor);
+            default:
+                return invocationVisitor;
+        }
+    }
+
+    /**
+     * Updates any Fragment.getActivity/requireActivity() or getLifecycleActivity() calls to call
+     * the replaced method.
+     */
+    private static class InvocationReplacer extends ClassVisitor {
+        private final boolean mSingleAndroidX;
+
+        private InvocationReplacer(ClassVisitor baseVisitor, boolean singleAndroidX) {
+            super(Opcodes.ASM7, baseVisitor);
+            mSingleAndroidX = singleAndroidX;
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            MethodVisitor base = super.visitMethod(access, name, descriptor, signature, exceptions);
+            return new MethodVisitor(Opcodes.ASM7, base) {
+                @Override
+                public void visitMethodInsn(int opcode, String owner, String name,
+                        String descriptor, boolean isInterface) {
+                    boolean isFragmentGetActivity = name.equals(GET_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && isFragmentSubclass(owner);
+                    boolean isFragmentRequireActivity = name.equals(REQUIRE_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && isFragmentSubclass(owner);
+                    boolean isSupportLifecycleFragmentImplGetLifecycleActivity =
+                            name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && owner.equals(SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME);
+                    if ((opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL)
+                            && (isFragmentGetActivity || isFragmentRequireActivity
+                                    || isSupportLifecycleFragmentImplGetLifecycleActivity)) {
+                        super.visitMethodInsn(
+                                opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface);
+                        if (mSingleAndroidX) {
+                            super.visitTypeInsn(
+                                    Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity");
+                        }
+                    } else {
+                        super.visitMethodInsn(opcode, owner, name, descriptor, isInterface);
+                    }
+                }
+
+                private boolean isFragmentSubclass(String internalType) {
+                    // Look up classes with a ClassLoader that will resolve any R classes to Object.
+                    // This is fine in this case as resource classes shouldn't be in the class
+                    // hierarchy of any Fragments.
+                    ClassLoader resourceStubbingClassLoader = new ClassLoader() {
+                        @Override
+                        protected Class<?> findClass(String name) throws ClassNotFoundException {
+                            if (name.matches(".*\\.R(\\$.+)?")) {
+                                return Object.class;
+                            }
+                            return super.findClass(name);
+                        }
+                    };
+
+                    // This doesn't use Class#isAssignableFrom to avoid us needing to load
+                    // AndroidX's Fragment class, which may not be on the classpath.
+                    try {
+                        String binaryName = Type.getObjectType(internalType).getClassName();
+                        Class<?> clazz = resourceStubbingClassLoader.loadClass(binaryName);
+                        while (clazz != null) {
+                            if (clazz.getName().equals("androidx.fragment.app.Fragment")) {
+                                return true;
+                            }
+                            clazz = clazz.getSuperclass();
+                        }
+                        return false;
+                    } catch (ClassNotFoundException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            };
+        }
+    }
+
+    /**
+     * Updates the implementation of Fragment.getActivity() and Fragment.requireActivity().
+     */
+    private static class FragmentClassVisitor extends ClassVisitor {
+        private FragmentClassVisitor(ClassVisitor baseVisitor) {
+            super(Opcodes.ASM7, baseVisitor);
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            // Update the descriptor of getActivity() and requireActivity().
+            MethodVisitor baseVisitor;
+            if (descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                    && (name.equals(GET_ACTIVITY_METHOD_NAME)
+                            || name.equals(REQUIRE_ACTIVITY_METHOD_NAME))) {
+                // Some Fragments in a Clank library implement an interface that defines an
+                // `Activity getActivity()` method. Fragment.getActivity() is considered its
+                // implementation from a typechecking perspective, but javac still generates a
+                // getActivity() method in these Fragments that calls Fragment.getActivity(). This
+                // isn't an issue when the methods return different types, but after changing
+                // Fragment.getActivity() to return an Activity, this generated implementation is
+                // now overriding Fragment's, which it can't do because Fragment.getActivity() is
+                // final. We make it non-final here to avoid this issue.
+                baseVisitor = super.visitMethod(
+                        access & ~Opcodes.ACC_FINAL, name, NEW_METHOD_DESCRIPTOR, null, exceptions);
+            } else {
+                baseVisitor = super.visitMethod(access, name, descriptor, signature, exceptions);
+            }
+
+            // Replace getActivity() with `return ContextUtils.activityFromContext(getContext());`
+            if (name.equals(GET_ACTIVITY_METHOD_NAME) && descriptor.equals(OLD_METHOD_DESCRIPTOR)) {
+                baseVisitor.visitVarInsn(Opcodes.ALOAD, 0);
+                baseVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "androidx/fragment/app/Fragment",
+                        "getContext", "()Landroid/content/Context;", false);
+                baseVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "org/chromium/utils/ContextUtils",
+                        "activityFromContext", "(Landroid/content/Context;)Landroid/app/Activity;",
+                        false);
+                baseVisitor.visitInsn(Opcodes.ARETURN);
+                // Since we set COMPUTE_FRAMES, the arguments of visitMaxs are ignored, but calling
+                // it forces ClassWriter to actually recompute the correct stack/local values.
+                // Without this call ClassWriter keeps the original stack=0,locals=1 which is wrong.
+                baseVisitor.visitMaxs(0, 0);
+                return null;
+            }
+
+            return new MethodRemapper(baseVisitor, new Remapper() {
+                @Override
+                public String mapType(String internalName) {
+                    if (internalName.equals("androidx/fragment/app/FragmentActivity")) {
+                        return "android/app/Activity";
+                    }
+                    return internalName;
+                }
+            });
+        }
+    }
+
+    /**
+     * Update SupportLifecycleFragmentImpl.getLifecycleActivity().
+     */
+    private static class SupportLifecycleFragmentImplClassVisitor extends ClassVisitor {
+        private SupportLifecycleFragmentImplClassVisitor(ClassVisitor baseVisitor) {
+            super(Opcodes.ASM7, baseVisitor);
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            // SupportLifecycleFragmentImpl has two getLifecycleActivity methods:
+            //   1. public FragmentActivity getLifecycleActivity():
+            //      This is what you'll see in the source. This delegates to Fragment.getActivity().
+            //   2. public Activity getLifecycleActivity():
+            //      This is generated because the class implements LifecycleFragment, which
+            //      declares this method, and delegates to #1.
+            //
+            // Here we change the return type of #1 and delete #2.
+            if (name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)) {
+                if (descriptor.equals(OLD_METHOD_DESCRIPTOR)) {
+                    return super.visitMethod(
+                            access, name, NEW_METHOD_DESCRIPTOR, signature, exceptions);
+                }
+                return null;
+            }
+            return super.visitMethod(access, name, descriptor, signature, exceptions);
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java b/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
new file mode 100644
index 0000000..ed2dc2d
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
@@ -0,0 +1,87 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.Type;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility methods for accessing {@link Type}s as Strings.
+ *
+ * Useful definitions to keep in mind when using this class:
+ * Internal name - The fully qualified name for a type with dots replaced by slashes. Not really
+ * relevant for primitive types.
+ * Type descriptor - Single letters for primitive types, "L" + internal name + ";" for class types.
+ *
+ * The methods in this class accept internal names or primitive type descriptors.
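+ *
+ * For example, the internal name of java.lang.String is "java/lang/String" and its type
+ * descriptor is "Ljava/lang/String;".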
+ */
+class TypeUtils {
+    static final String ASSERTION_ERROR = "java/lang/AssertionError";
+    static final String ASSET_MANAGER = "android/content/res/AssetManager";
+    static final String BUILD_HOOKS = "org/chromium/build/BuildHooks";
+    static final String BUILD_HOOKS_ANDROID = "org/chromium/build/BuildHooksAndroid";
+    static final String CONFIGURATION = "android/content/res/Configuration";
+    static final String CONTEXT = "android/content/Context";
+    static final String CONTEXT_WRAPPER = "android/content/ContextWrapper";
+    static final String RESOURCES = "android/content/res/Resources";
+    static final String STRING = "java/lang/String";
+    static final String THEME = "android/content/res/Resources$Theme";
+
+    static final String BOOLEAN = "Z";
+    static final String INT = "I";
+    static final String VOID = "V";
+    private static final Map<String, Type> PRIMITIVE_DESCRIPTORS;
+    static {
+        PRIMITIVE_DESCRIPTORS = new HashMap<>();
+        PRIMITIVE_DESCRIPTORS.put(Type.BOOLEAN_TYPE.toString(), Type.BOOLEAN_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.INT_TYPE.toString(), Type.INT_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.VOID_TYPE.toString(), Type.VOID_TYPE);
+    }
+
+    /**
+     * Returns the full method signature with internal names.
+     *
+     * @param methodName Name of the method (ex. "getResources").
+     * @param returnType Internal name for the return type.
+     * @param argumentTypes List of internal names for argument types.
+     * @return String representation of the method signature.
+     */
+    static String getMethodSignature(
+            String methodName, String returnType, String... argumentTypes) {
+        return methodName + getMethodDescriptor(returnType, argumentTypes);
+    }
+
+    /**
+     * Builds a method descriptor suitable for use with {@link org.objectweb.asm.MethodVisitor}.
+     *
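+     * For example (illustrative): {@code getMethodDescriptor(VOID, CONTEXT, BOOLEAN)} returns
+     * "(Landroid/content/Context;Z)V".
+     *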
+     * @param returnType Internal name for the return type of the method (primitive or class).
+     * @param argumentTypes Internal names for the argument types (primitive or class).
+     * @return The generated method descriptor.
+     */
+    static String getMethodDescriptor(String returnType, String... argumentTypes) {
+        Type[] typedArguments = new Type[argumentTypes.length];
+        for (int i = 0; i < argumentTypes.length; ++i) {
+            // Argument list should be empty in this case, not V (void).
+            assert !Type.VOID_TYPE.toString().equals(argumentTypes[i]);
+            typedArguments[i] = convert(argumentTypes[i]);
+        }
+        return Type.getMethodDescriptor(convert(returnType), typedArguments);
+    }
+
+    /**
+     * Converts an internal name for a type to a {@link Type}.
+     *
+     * @param type Internal name for a type (primitive or class).
+     * @return The resulting Type.
+     */
+    private static Type convert(String type) {
+        if (PRIMITIVE_DESCRIPTORS.containsKey(type)) {
+            return PRIMITIVE_DESCRIPTORS.get(type);
+        }
+        return Type.getObjectType(type);
+    }
+}
diff --git a/src/build/android/chromium-debug.keystore b/src/build/android/chromium-debug.keystore
new file mode 100644
index 0000000..67eb0aa
--- /dev/null
+++ b/src/build/android/chromium-debug.keystore
Binary files differ
diff --git a/src/build/android/convert_dex_profile.py b/src/build/android/convert_dex_profile.py
new file mode 100755
index 0000000..f9fdeb6
--- /dev/null
+++ b/src/build/android/convert_dex_profile.py
@@ -0,0 +1,557 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import re
+import subprocess
+import sys
+
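+# Example invocation (hypothetical paths):
+#   vpython convert_dex_profile.py --dexdump-path dexdump --dex-path app.dex \
+#       --proguard-mapping-path mapping.txt --input-profile-path in.prof \
+#       --output-profile-path out.prof --obfuscate
+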
+DEX_CLASS_NAME_RE = re.compile(r'\'L(?P<class_name>[^;]+);\'')
+DEX_METHOD_NAME_RE = re.compile(r'\'(?P<method_name>[^\']+)\'')
+DEX_METHOD_TYPE_RE = re.compile( # type descriptor method signature re
+    r'\''
+    r'\('
+    r'(?P<method_params>[^)]*)'
+    r'\)'
+    r'(?P<method_return_type>[^\']+)'
+    r'\'')
+DEX_METHOD_LINE_NR_RE = re.compile(r'line=(?P<line_number>\d+)')
+
+PROFILE_METHOD_RE = re.compile(
+    r'(?P<tags>[HSP]+)' # tags such as H/S/P
+    r'(?P<class_name>L[^;]+;)' # class name in type descriptor format
+    r'->(?P<method_name>[^(]+)'
+    r'\((?P<method_params>[^)]*)\)'
+    r'(?P<method_return_type>.+)')
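+# Example line matched by PROFILE_METHOD_RE (as in the accompanying tests):
+#   HPSLorg/chromium/Original;->getInstance()Lorg/chromium/Original;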
+
+PROGUARD_CLASS_MAPPING_RE = re.compile(
+    r'(?P<original_name>[^ ]+)'
+    r' -> '
+    r'(?P<obfuscated_name>[^:]+):')
+PROGUARD_METHOD_MAPPING_RE = re.compile(
+    # line_start:line_end: (optional)
+    r'((?P<line_start>\d+):(?P<line_end>\d+):)?'
+    r'(?P<return_type>[^ ]+)' # original method return type
+    # original method class name (if exists)
+    r' (?:(?P<original_method_class>[a-zA-Z_\d.$]+)\.)?'
+    r'(?P<original_method_name>[^.\(]+)'
+    r'\((?P<params>[^\)]*)\)' # original method params
+    r'(?:[^ ]*)' # original method line numbers (ignored)
+    r' -> '
+    r'(?P<obfuscated_name>.+)') # obfuscated method name
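+# Example method mapping line (as in the accompanying tests):
+#   4:4:org.chromium.Original getInstance():203 -> a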
+
+TYPE_DESCRIPTOR_RE = re.compile(
+    r'(?P<brackets>\[*)'
+    r'(?:'
+    r'(?P<class_name>L[^;]+;)'
+    r'|'
+    r'[VZBSCIJFD]'
+    r')')
+
+DOT_NOTATION_MAP = {
+    '': '',
+    'boolean': 'Z',
+    'byte': 'B',
+    'void': 'V',
+    'short': 'S',
+    'char': 'C',
+    'int': 'I',
+    'long': 'J',
+    'float': 'F',
+    'double': 'D'
+}
+
+class Method(object):
+  def __init__(self, name, class_name, param_types=None, return_type=None):
+    self.name = name
+    self.class_name = class_name
+    self.param_types = param_types
+    self.return_type = return_type
+
+  def __str__(self):
+    return '{}->{}({}){}'.format(self.class_name, self.name,
+        self.param_types or '', self.return_type or '')
+
+  def __repr__(self):
+    return 'Method<{}->{}({}){}>'.format(self.class_name, self.name,
+        self.param_types or '', self.return_type or '')
+
+  def __cmp__(self, other):
+    return cmp((self.class_name, self.name, self.param_types, self.return_type),
+        (other.class_name, other.name, other.param_types, other.return_type))
+
+  def __hash__(self):
+    # only hash name and class_name since other fields may not be set yet.
+    return hash((self.name, self.class_name))
+
+
+class Class(object):
+  def __init__(self, name):
+    self.name = name
+    self._methods = []
+
+  def AddMethod(self, method, line_numbers):
+    self._methods.append((method, set(line_numbers)))
+
+  def FindMethodsAtLine(self, method_name, line_start, line_end=None):
+    """Searches through dex class for a method given a name and line numbers
+
+    The dex maps methods to line numbers. Given a method name in this class as
+    well as a start line and an optional end line (which act as hints as to
+    which method in the class is being looked for), this returns a list of
+    possible matches (or None if none are found).
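+
+    For example, in the accompanying tests two methods named 'a' both map line
+    8, so FindMethodsAtLine('a', 8) returns both of them.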
+
+    Args:
+      method_name: name of method being searched for
+      line_start: start of hint range for lines in this method
+      line_end: end of hint range for lines in this method (optional)
+
+    Returns:
+      A list of Method objects that could match the hints given, or None if no
+      method is found.
+    """
+    found_methods = []
+    if line_end is None:
+      hint_lines = set([line_start])
+    else:
+      hint_lines = set(range(line_start, line_end+1))
+
+    named_methods = [(method, l) for method, l in self._methods
+                     if method.name == method_name]
+
+    if len(named_methods) == 1:
+      return [method for method, l in named_methods]
+    if len(named_methods) == 0:
+      return None
+
+    for method, line_numbers in named_methods:
+      if not hint_lines.isdisjoint(line_numbers):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+            found_methods, hint_lines, self.name)
+      return found_methods
+
+    for method, line_numbers in named_methods:
+      if (max(hint_lines) >= min(line_numbers)
+          and min(hint_lines) <= max(line_numbers)):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+            found_methods, hint_lines, self.name)
+      return found_methods
+    else:
+      logging.warning('No method named "%s" in class "%s" is '
+                      'mapped to lines %s', method_name, self.name, hint_lines)
+      return None
+
+
+class Profile(object):
+  def __init__(self):
+    # {Method: set(char)}
+    self._methods = collections.defaultdict(set)
+    self._classes = []
+
+  def AddMethod(self, method, tags):
+    for tag in tags:
+      self._methods[method].add(tag)
+
+  def AddClass(self, cls):
+    self._classes.append(cls)
+
+  def WriteToFile(self, path):
+    with open(path, 'w') as output_profile:
+      for cls in sorted(self._classes):
+        output_profile.write(cls + '\n')
+      for method in sorted(self._methods):
+        tags = sorted(self._methods[method])
+        line = '{}{}\n'.format(''.join(tags), str(method))
+        output_profile.write(line)
+
+
+class ProguardMapping(object):
+  def __init__(self):
+    # {Method: set(Method)}
+    self._method_mapping = collections.defaultdict(set)
+    # {String: String} String is class name in type descriptor format
+    self._class_mapping = dict()
+
+  def AddMethodMapping(self, from_method, to_method):
+    self._method_mapping[from_method].add(to_method)
+
+  def AddClassMapping(self, from_class, to_class):
+    self._class_mapping[from_class] = to_class
+
+  def GetMethodMapping(self, from_method):
+    return self._method_mapping.get(from_method)
+
+  def GetClassMapping(self, from_class):
+    return self._class_mapping.get(from_class, from_class)
+
+  def MapTypeDescriptor(self, type_descriptor):
+    match = TYPE_DESCRIPTOR_RE.search(type_descriptor)
+    assert match is not None
+    class_name = match.group('class_name')
+    if class_name is not None:
+      return match.group('brackets') + self.GetClassMapping(class_name)
+    # just a native type, return as is
+    return match.group()
+
+  def MapTypeDescriptorList(self, type_descriptor_list):
+    return TYPE_DESCRIPTOR_RE.sub(
+        lambda match: self.MapTypeDescriptor(match.group()),
+        type_descriptor_list)
+
+
+class MalformedLineException(Exception):
+  def __init__(self, message, line_number):
+    super(MalformedLineException, self).__init__(message)
+    self.line_number = line_number
+
+  def __str__(self):
+    return self.message + ' at line {}'.format(self.line_number)
+
+
+class MalformedProguardMappingException(MalformedLineException):
+  pass
+
+
+class MalformedProfileException(MalformedLineException):
+  pass
+
+
+def _RunDexDump(dexdump_path, dex_file_path):
+  return subprocess.check_output([dexdump_path, dex_file_path]).splitlines()
+
+
+def _ReadFile(file_path):
+  with open(file_path, 'r') as f:
+    return f.readlines()
+
+
+def _ToTypeDescriptor(dot_notation):
+  """Parses a dot notation type and returns it in type descriptor format
+
+  eg:
+  org.chromium.browser.ChromeActivity -> Lorg/chromium/browser/ChromeActivity;
+  boolean -> Z
+  int[] -> [I
+
+  Args:
+    dot_notation: trimmed string with a single type in dot notation format
+
+  Returns:
+    A string with the type in type descriptor format
+  """
+  dot_notation = dot_notation.strip()
+  prefix = ''
+  while dot_notation.endswith('[]'):
+    prefix += '['
+    dot_notation = dot_notation[:-2]
+  if dot_notation in DOT_NOTATION_MAP:
+    return prefix + DOT_NOTATION_MAP[dot_notation]
+  return prefix + 'L' + dot_notation.replace('.', '/') + ';'
+
+
+def _DotNotationListToTypeDescriptorList(dot_notation_list_string):
+  """Parses a param list of dot notation format and returns it in type
+  descriptor format
+
+  eg:
+  org.chromium.browser.ChromeActivity,boolean,int[] ->
+      Lorg/chromium/browser/ChromeActivity;Z[I
+
+  Args:
+    dot_notation_list_string: single string with multiple comma separated types
+                              in dot notation format
+
+  Returns:
+    A string with the param list in type descriptor format
+  """
+  return ''.join(_ToTypeDescriptor(param) for param in
+      dot_notation_list_string.split(','))
+
+
+def ProcessDex(dex_dump):
+  """Parses dexdump output returning a dict of class names to Class objects
+
+  Parses output of the dexdump command on a dex file and extracts information
+  about classes and their respective methods and which line numbers a method is
+  mapped to.
+
+  Methods that are not mapped to any line number are ignored and not listed
+  inside their respective Class objects.
+
+  Args:
+    dex_dump: An array of lines of dexdump output
+
+  Returns:
+    A dict that maps from class names in type descriptor format (but without the
+    surrounding 'L' and ';') to Class objects.
+  """
+  # class_name: Class
+  classes_by_name = {}
+  current_class = None
+  current_method = None
+  reading_positions = False
+  reading_methods = False
+  method_line_numbers = []
+  for line in dex_dump:
+    line = line.strip()
+    if line.startswith('Class descriptor'):
+      # New class started, no longer reading methods.
+      reading_methods = False
+      current_class = Class(DEX_CLASS_NAME_RE.search(line).group('class_name'))
+      classes_by_name[current_class.name] = current_class
+    elif (line.startswith('Direct methods')
+          or line.startswith('Virtual methods')):
+      reading_methods = True
+    elif reading_methods and line.startswith('name'):
+      assert current_class is not None
+      current_method = Method(
+          DEX_METHOD_NAME_RE.search(line).group('method_name'),
+          "L" + current_class.name + ";")
+    elif reading_methods and line.startswith('type'):
+      assert current_method is not None
+      match = DEX_METHOD_TYPE_RE.search(line)
+      current_method.param_types = match.group('method_params')
+      current_method.return_type = match.group('method_return_type')
+    elif line.startswith('positions'):
+      assert reading_methods
+      reading_positions = True
+      method_line_numbers = []
+    elif reading_positions and line.startswith('0x'):
+      line_number = DEX_METHOD_LINE_NR_RE.search(line).group('line_number')
+      method_line_numbers.append(int(line_number))
+    elif reading_positions and line.startswith('locals'):
+      if len(method_line_numbers) > 0:
+        current_class.AddMethod(current_method, method_line_numbers)
+      # finished reading method line numbers
+      reading_positions = False
+  return classes_by_name
+
+
+def ProcessProguardMapping(proguard_mapping_lines, dex):
+  """Parses a proguard mapping file
+
+  This takes proguard mapping file lines and then uses the obfuscated dex to
+  create a mapping of unobfuscated methods to obfuscated ones and vice versa.
+
+  The dex is used because the proguard mapping file only has the name of the
+  obfuscated methods but not their signature, thus the dex is read to look up
+  which method with a specific name was mapped to the lines mentioned in the
+  proguard mapping file.
+
+  Args:
+    proguard_mapping_lines: Array of strings, each is a line from the proguard
+                            mapping file (in order).
+    dex: a dict of class name (in type descriptor format but without the
+         enclosing 'L' and ';') to a Class object.
+  Returns:
+    Two ProguardMapping objects: the first maps from obfuscated methods to a
+    set of non-obfuscated ones, and from obfuscated class names to original
+    class names, both in type descriptor format (with the enclosing 'L' and
+    ';'); the second is the reverse mapping.
+  """
+  mapping = ProguardMapping()
+  reverse_mapping = ProguardMapping()
+  to_be_obfuscated = []
+  current_class_orig = None
+  current_class_obfs = None
+  for index, line in enumerate(proguard_mapping_lines):
+    if line.strip() == '':
+      continue
+    if not line.startswith(' '):
+      match = PROGUARD_CLASS_MAPPING_RE.search(line)
+      if match is None:
+        raise MalformedProguardMappingException(
+            'Malformed class mapping', index)
+      current_class_orig = match.group('original_name')
+      current_class_obfs = match.group('obfuscated_name')
+      mapping.AddClassMapping(_ToTypeDescriptor(current_class_obfs),
+                              _ToTypeDescriptor(current_class_orig))
+      reverse_mapping.AddClassMapping(_ToTypeDescriptor(current_class_orig),
+                                      _ToTypeDescriptor(current_class_obfs))
+      continue
+
+    assert current_class_orig is not None
+    assert current_class_obfs is not None
+    line = line.strip()
+    match = PROGUARD_METHOD_MAPPING_RE.search(line)
+    # check if is a method mapping (we ignore field mappings)
+    if match is not None:
+      # check if this line is an inlining by reading ahead 1 line.
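+      # For example, in the mapping lines
+      #   4:4:void inlined():237:237 -> a
+      #   4:4:org.chromium.Original getInstance():203 -> a
+      # the first line records a body inlined into getInstance() and is
+      # skipped by the check below.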
+      if index + 1 < len(proguard_mapping_lines):
+        next_match = PROGUARD_METHOD_MAPPING_RE.search(
+            proguard_mapping_lines[index+1].strip())
+        if (next_match and match.group('line_start') is not None
+            and next_match.group('line_start') == match.group('line_start')
+            and next_match.group('line_end') == match.group('line_end')):
+          continue # This is an inlining, skip
+
+      original_method = Method(
+          match.group('original_method_name'),
+          _ToTypeDescriptor(
+              match.group('original_method_class') or current_class_orig),
+          _DotNotationListToTypeDescriptorList(match.group('params')),
+          _ToTypeDescriptor(match.group('return_type')))
+
+      if match.group('line_start') is not None:
+        obfs_methods = (dex[current_class_obfs.replace('.', '/')]
+            .FindMethodsAtLine(
+                match.group('obfuscated_name'),
+                int(match.group('line_start')),
+                int(match.group('line_end'))))
+
+        if obfs_methods is None:
+          continue
+
+        for obfs_method in obfs_methods:
+          mapping.AddMethodMapping(obfs_method, original_method)
+          reverse_mapping.AddMethodMapping(original_method, obfs_method)
+      else:
+        to_be_obfuscated.append(
+            (original_method, match.group('obfuscated_name')))
+
+  for original_method, obfuscated_name in to_be_obfuscated:
+    obfuscated_method = Method(
+        obfuscated_name,
+        reverse_mapping.GetClassMapping(original_method.class_name),
+        reverse_mapping.MapTypeDescriptorList(original_method.param_types),
+        reverse_mapping.MapTypeDescriptor(original_method.return_type))
+    mapping.AddMethodMapping(obfuscated_method, original_method)
+    reverse_mapping.AddMethodMapping(original_method, obfuscated_method)
+  return mapping, reverse_mapping
+
+
+def ProcessProfile(input_profile, proguard_mapping):
+  """Parses an android profile and uses the proguard mapping to (de)obfuscate it
+
+  This takes the android profile lines and for each method or class in the
+  profile, it uses the mapping to either obfuscate or deobfuscate (based on the
+  provided mapping) and returns a Profile object that stores this information.
+
+  Args:
+    input_profile: array of lines of the input profile
+    proguard_mapping: a proguard mapping that would map from the classes and
+                      methods in the input profile to the classes and methods
+                      that should be in the output profile.
+
+  Returns:
+    A Profile object that stores the information (i.e. the list of mapped
+    classes and methods plus their tags).
+  """
+  profile = Profile()
+  for index, line in enumerate(input_profile):
+    line = line.strip()
+    if line.startswith('L'):
+      profile.AddClass(proguard_mapping.GetClassMapping(line))
+      continue
+    match = PROFILE_METHOD_RE.search(line)
+    if not match:
+      raise MalformedProfileException("Malformed line", index)
+
+    method = Method(
+        match.group('method_name'),
+        match.group('class_name'),
+        match.group('method_params'),
+        match.group('method_return_type'))
+
+    mapped_methods = proguard_mapping.GetMethodMapping(method)
+    if mapped_methods is None:
+      logging.warning('No method matching "%s" has been found in the proguard '
+                      'mapping file', method)
+      continue
+
+    for original_method in mapped_methods:
+      profile.AddMethod(original_method, match.group('tags'))
+
+  return profile
+
+
+def ObfuscateProfile(nonobfuscated_profile, dex_file, proguard_mapping,
+                     dexdump_path, output_filename):
+  """Helper method for obfuscating a profile.
+
+  Args:
+    nonobfuscated_profile: a profile with nonobfuscated symbols.
+    dex_file: path to the dex file matching the mapping.
+    proguard_mapping: a mapping from nonobfuscated to obfuscated symbols used
+      in the dex file.
+    dexdump_path: path to the dexdump utility.
+    output_filename: output filename in which to write the obfuscated profile.
+  """
+  dexinfo = ProcessDex(_RunDexDump(dexdump_path, dex_file))
+  _, reverse_mapping = ProcessProguardMapping(
+      _ReadFile(proguard_mapping), dexinfo)
+  obfuscated_profile = ProcessProfile(
+      _ReadFile(nonobfuscated_profile), reverse_mapping)
+  obfuscated_profile.WriteToFile(output_filename)
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--dexdump-path',
+      required=True,
+      help='Path to dexdump binary.')
+  parser.add_argument(
+      '--dex-path',
+      required=True,
+      help='Path to dex file corresponding to the proguard mapping file.')
+  parser.add_argument(
+      '--proguard-mapping-path',
+      required=True,
+      help='Path to input proguard mapping file corresponding to the dex file.')
+  parser.add_argument(
+      '--output-profile-path',
+      required=True,
+      help='Path to output profile.')
+  parser.add_argument(
+      '--input-profile-path',
+      required=True,
+      help='Path to input profile.')
+  parser.add_argument(
+      '--verbose',
+      action='store_true',
+      default=False,
+      help='Print verbose output.')
+  obfuscation = parser.add_mutually_exclusive_group(required=True)
+  obfuscation.add_argument('--obfuscate', action='store_true',
+      help='Indicates to output an obfuscated profile given a deobfuscated '
+     'one.')
+  obfuscation.add_argument('--deobfuscate', dest='obfuscate',
+      action='store_false', help='Indicates to output a deobfuscated profile '
+      'given an obfuscated one.')
+  options = parser.parse_args(args)
+
+  if options.verbose:
+    log_level = logging.WARNING
+  else:
+    log_level = logging.ERROR
+  logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
+
+  dex = ProcessDex(_RunDexDump(options.dexdump_path, options.dex_path))
+  proguard_mapping, reverse_proguard_mapping = ProcessProguardMapping(
+      _ReadFile(options.proguard_mapping_path), dex)
+  if options.obfuscate:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        reverse_proguard_mapping)
+  else:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        proguard_mapping)
+  profile.WriteToFile(options.output_profile_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/convert_dex_profile_tests.py b/src/build/android/convert_dex_profile_tests.py
new file mode 100644
index 0000000..0ddc5ce
--- /dev/null
+++ b/src/build/android/convert_dex_profile_tests.py
@@ -0,0 +1,276 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for convert_dex_profile.
+
+Can be run from build/android/:
+  $ cd build/android
+  $ python convert_dex_profile_tests.py
+"""
+
+import os
+import sys
+import tempfile
+import unittest
+
+import convert_dex_profile as cp
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import build_utils
+
+cp.logging.disable(cp.logging.CRITICAL)
+
+# There are two obfuscations used in the tests below, each with the same
+# unobfuscated profile. The first, corresponding to DEX_DUMP, PROGUARD_MAPPING,
+# and OBFUSCATED_PROFILE, has an ambiguous method a() which is mapped to both
+# getInstance and initialize. The second, corresponding to DEX_DUMP_2,
+# PROGUARD_MAPPING_2 and OBFUSCATED_PROFILE_2, removes the ambiguity.
+
+DEX_DUMP = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+      #0              : (in La;)
+        name          : '<clinit>'
+        type          : '(Ljava/lang/String;)V'
+        code          -
+        catches       : 1
+                0x000f - 0x001e
+                  <any> -> 0x0093
+        positions     :
+                0x0001 line=310
+                0x0057 line=313
+        locals        : 
+      #1              : (in La;)
+        name          : '<init>'
+        type          : '()V'
+        positions     :
+        locals        : 
+  Virtual methods   -
+      #0              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/String;)I'
+        positions     : 
+          0x0000 line=2
+          0x0003 line=3
+          0x001b line=8
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #1              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/Object;)I'
+        positions     : 
+          0x0000 line=8
+          0x0003 line=9
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #2              : (in La;)
+        name          : 'b'
+        type          : '()La;'
+        positions     : 
+          0x0000 line=1
+        locals        : 
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    4:4:void inlined():237:237 -> a
+    4:4:org.chromium.Original getInstance():203 -> a
+    5:5:void org.chromium.Original$Subclass.<init>(org.chromium.Original,byte):130:130 -> a
+    5:5:void initialize():237 -> a
+    5:5:org.chromium.Original getInstance():203 -> a
+    6:6:void initialize():237:237 -> a
+    9:9:android.content.Context org.chromium.base.ContextUtils.getApplicationContext():49:49 -> a
+    9:9:android.content.Context getContext():219 -> a
+    9:9:void initialize():245 -> a
+    9:9:org.chromium.Original getInstance():203 -> a"""
+
+OBFUSCATED_PROFILE = \
+"""La;
+PLa;->b()La;
+SLa;->a(Ljava/lang/Object;)I
+HPLa;->a(Ljava/lang/String;)I"""
+
+DEX_DUMP_2 = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+      #0              : (in La;)
+        name          : '<clinit>'
+        type          : '(Ljava/lang/String;)V'
+        code          -
+        catches       : 1
+                0x000f - 0x001e
+                  <any> -> 0x0093
+        positions     :
+                0x0001 line=310
+                0x0057 line=313
+        locals        : 
+      #1              : (in La;)
+        name          : '<init>'
+        type          : '()V'
+        positions     :
+        locals        : 
+  Virtual methods   -
+      #0              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/String;)I'
+        positions     : 
+          0x0000 line=2
+          0x0003 line=3
+          0x001b line=8
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #1              : (in La;)
+        name          : 'c'
+        type          : '(Ljava/lang/Object;)I'
+        positions     : 
+          0x0000 line=8
+          0x0003 line=9
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #2              : (in La;)
+        name          : 'b'
+        type          : '()La;'
+        positions     : 
+          0x0000 line=1
+        locals        : 
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING_2 = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    void initialize() -> c
+    org.chromium.Original getInstance():203 -> a
+    4:4:void inlined():237:237 -> a"""
+
+OBFUSCATED_PROFILE_2 = \
+"""La;
+PLa;->b()La;
+HPSLa;->a()La;
+HPLa;->c()V"""
+
+UNOBFUSCATED_PROFILE = \
+"""Lorg/chromium/Original;
+PLorg/chromium/Original;->another()Lorg/chromium/Original;
+HPSLorg/chromium/Original;->getInstance()Lorg/chromium/Original;
+HPLorg/chromium/Original;->initialize()V"""
+
+class GenerateProfileTests(unittest.TestCase):
+  def testProcessDex(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    self.assertIsNotNone(dex['a'])
+
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 311, 313)), 1)
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 309, 315)), 1)
+    clinit = dex['a'].FindMethodsAtLine('<clinit>', 311, 313)[0]
+    self.assertEquals(clinit.name, '<clinit>')
+    self.assertEquals(clinit.return_type, 'V')
+    self.assertEquals(clinit.param_types, 'Ljava/lang/String;')
+
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2)
+    self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None))
+
+# pylint: disable=protected-access
+  def testProcessProguardMapping(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, reverse = cp.ProcessProguardMapping(
+        PROGUARD_MAPPING.splitlines(), dex)
+
+    self.assertEquals('La;', reverse.GetClassMapping('Lorg/chromium/Original;'))
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    subclassInit = cp.Method(
+        '<init>', 'Lorg/chromium/Original$Subclass;',
+        'Lorg/chromium/Original;B', 'V')
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/String;', 'I'))
+    self.assertEquals(len(mapped), 2)
+    self.assertIn(getInstance, mapped)
+    self.assertNotIn(subclassInit, mapped)
+    self.assertNotIn(
+        cp.Method('inlined', 'Lorg/chromium/Original;', '', 'V'), mapped)
+    self.assertIn(initialize, mapped)
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I'))
+    self.assertEquals(len(mapped), 1)
+    self.assertIn(getInstance, mapped)
+
+    mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;'))
+    self.assertEquals(len(mapped), 1)
+    self.assertIn(another, mapped)
+
+    for from_method, to_methods in mapping._method_mapping.iteritems():
+      for to_method in to_methods:
+        self.assertIn(from_method, reverse.GetMethodMapping(to_method))
+    for from_class, to_class in mapping._class_mapping.iteritems():
+      self.assertEquals(from_class, reverse.GetClassMapping(to_class))
+
+  def testProcessProfile(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+
+    self.assertIn('Lorg/chromium/Original;', profile._classes)
+    self.assertIn(getInstance, profile._methods)
+    self.assertIn(initialize, profile._methods)
+    self.assertIn(another, profile._methods)
+
+    self.assertEquals(profile._methods[getInstance], set(['H', 'S', 'P']))
+    self.assertEquals(profile._methods[initialize], set(['H', 'P']))
+    self.assertEquals(profile._methods[another], set(['P']))
+
+  def testEndToEnd(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+    with tempfile.NamedTemporaryFile() as temp:
+      profile.WriteToFile(temp.name)
+      with open(temp.name, 'r') as f:
+        for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())):
+          self.assertEquals(a.strip(), b.strip())
+
+  def testObfuscateProfile(self):
+    with build_utils.TempDir() as temp_dir:
+      # The dex dump is used as the dex file by passing /bin/cat as the
+      # dexdump program.
+      dex_path = os.path.join(temp_dir, 'dexdump')
+      with open(dex_path, 'w') as dex_file:
+        dex_file.write(DEX_DUMP_2)
+      mapping_path = os.path.join(temp_dir, 'mapping')
+      with open(mapping_path, 'w') as mapping_file:
+        mapping_file.write(PROGUARD_MAPPING_2)
+      unobfuscated_path = os.path.join(temp_dir, 'unobfuscated')
+      with open(unobfuscated_path, 'w') as unobfuscated_file:
+        unobfuscated_file.write(UNOBFUSCATED_PROFILE)
+      obfuscated_path = os.path.join(temp_dir, 'obfuscated')
+      cp.ObfuscateProfile(unobfuscated_path, dex_path, mapping_path, '/bin/cat',
+                          obfuscated_path)
+      with open(obfuscated_path) as obfuscated_file:
+        obfuscated_profile = sorted(obfuscated_file.readlines())
+      for a, b in zip(
+          sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile):
+        self.assertEquals(a.strip(), b.strip())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/dcheck_is_off.flags b/src/build/android/dcheck_is_off.flags
new file mode 100644
index 0000000..78b9cc2
--- /dev/null
+++ b/src/build/android/dcheck_is_off.flags
@@ -0,0 +1,17 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Contains flags that are applied only when ENABLE_DCHECK=false.
+
+-checkdiscard @org.chromium.base.annotations.CheckDiscard class ** {
+  *;
+}
+-checkdiscard class ** {
+  @org.chromium.base.annotations.CheckDiscard *;
+}
+
+# Ensure @RemovableInRelease actually works.
+-checkdiscard class ** {
+  @org.chromium.base.annotations.RemovableInRelease *;
+}
diff --git a/src/build/android/devil_chromium.json b/src/build/android/devil_chromium.json
new file mode 100644
index 0000000..0bfcfd8
--- /dev/null
+++ b/src/build/android/devil_chromium.json
@@ -0,0 +1,120 @@
+{
+  "config_type": "BaseConfig",
+  "dependencies": {
+    "aapt": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/aapt"
+          ]
+        }
+      }
+    },
+    "adb": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/platform-tools/adb"
+          ]
+        }
+      }
+    },
+    "android_build_tools_libc++": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/lib64/libc++.so"
+          ]
+        }
+      }
+    },
+    "android_sdk": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public"
+          ]
+        }
+      }
+    },
+    "dexdump": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/dexdump"
+          ]
+        }
+      }
+    },
+    "split-select": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/split-select"
+          ]
+        }
+      }
+    },
+    "simpleperf": {
+      "file_info": {
+        "android_armeabi-v7a": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/arm/simpleperf"
+          ]
+        },
+        "android_arm64-v8a": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/arm64/simpleperf"
+          ]
+        },
+        "android_x86": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/x86/simpleperf"
+          ]
+        },
+        "android_x86_64": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/x86_64/simpleperf"
+          ]
+        },
+        "linux_x86": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/linux/x86/simpleperf"
+          ]
+        },
+        "linux_x86_64": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/linux/x86_64/simpleperf"
+          ]
+        }
+      }
+    },
+    "simpleperf_scripts": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf"
+          ]
+        }
+      }
+    },
+    "llvm-symbolizer": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer"
+          ]
+        }
+      }
+    },
+    "bundletool": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar"
+          ]
+        }
+      }
+    }
+  }
+}
diff --git a/src/build/android/devil_chromium.py b/src/build/android/devil_chromium.py
new file mode 100644
index 0000000..20ae1e3
--- /dev/null
+++ b/src/build/android/devil_chromium.py
@@ -0,0 +1,200 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configures devil for use in chromium."""
+
+import os
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.insert(1, host_paths.DEVIL_PATH)
+
+from devil import devil_env
+from devil.android.ndk import abis
+
+_BUILD_DIR = os.path.join(constants.DIR_SOURCE_ROOT, 'build')
+if _BUILD_DIR not in sys.path:
+  sys.path.insert(1, _BUILD_DIR)
+
+import gn_helpers
+
+_DEVIL_CONFIG = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), 'devil_chromium.json'))
+
+_DEVIL_BUILD_PRODUCT_DEPS = {
+  'chromium_commands': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['lib.java', 'chromium_commands.dex.jar'],
+    }
+  ],
+  'forwarder_device': [
+    {
+      'platform': 'android',
+      'arch': abis.ARM,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.ARM_64,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips',
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips64',
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86_64,
+      'path_components': ['forwarder_dist'],
+    },
+  ],
+  'forwarder_host': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['host_forwarder'],
+    },
+  ],
+  'md5sum_device': [
+    {
+      'platform': 'android',
+      'arch': abis.ARM,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.ARM_64,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips',
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips64',
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86_64,
+      'path_components': ['md5sum_dist'],
+    },
+  ],
+  'md5sum_host': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['md5sum_bin_host'],
+    },
+  ],
+}
+
+
+def _UseLocalBuildProducts(output_directory, devil_dynamic_config):
+  output_directory = os.path.abspath(output_directory)
+  devil_dynamic_config['dependencies'] = {
+      dep_name: {
+          'file_info': {
+              '%s_%s' % (dep_config['platform'], dep_config['arch']): {
+                  'local_paths': [
+                      os.path.join(output_directory,
+                                   *dep_config['path_components']),
+                  ],
+              }
+              for dep_config in dep_configs
+          }
+      }
+      for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.iteritems()
+  }
+
+
+def _BuildWithChromium():
+  """Returns value of gclient's |build_with_chromium|."""
+  gni_path = os.path.join(_BUILD_DIR, 'config', 'gclient_args.gni')
+  if not os.path.exists(gni_path):
+    return False
+  with open(gni_path) as f:
+    data = f.read()
+  args = gn_helpers.FromGNArgs(data)
+  return args.get('build_with_chromium', False)
+
+
+def Initialize(output_directory=None, custom_deps=None, adb_path=None):
+  """Initializes devil with chromium's binaries and third-party libraries.
+
+  This includes:
+    - Libraries:
+      - the android SDK ("android_sdk")
+    - Build products:
+      - host & device forwarder binaries
+          ("forwarder_device" and "forwarder_host")
+      - host & device md5sum binaries ("md5sum_device" and "md5sum_host")
+
+  Args:
+    output_directory: An optional path to the output directory. If not set,
+      no built dependencies are configured.
+    custom_deps: An optional dictionary specifying custom dependencies.
+      This should be of the form:
+
+        {
+          'dependency_name': {
+            'platform': 'path',
+            ...
+          },
+          ...
+        }
+    adb_path: An optional path to use for the adb binary. If not set, this uses
+      the adb binary provided by the Android SDK.
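+
+  Example (the paths below are purely illustrative):
+
+    devil_chromium.Initialize(
+        output_directory='out/Debug',
+        adb_path='/usr/local/bin/adb')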
+  """
+  build_with_chromium = _BuildWithChromium()
+
+  devil_dynamic_config = {
+    'config_type': 'BaseConfig',
+    'dependencies': {},
+  }
+  if build_with_chromium and output_directory:
+    # Non-chromium users of chromium's //build directory fetch build products
+    # from Google Storage rather than using locally built copies. Chromium uses
+    # locally built copies so that changes to the tools can be easily tested.
+    _UseLocalBuildProducts(output_directory, devil_dynamic_config)
+
+  if custom_deps:
+    devil_dynamic_config['dependencies'].update(custom_deps)
+  if adb_path:
+    devil_dynamic_config['dependencies'].update({
+      'adb': {
+        'file_info': {
+          devil_env.GetPlatform(): {
+            'local_paths': [adb_path]
+          }
+        }
+      }
+    })
+
+  config_files = [_DEVIL_CONFIG] if build_with_chromium else None
+  devil_env.config.Initialize(configs=[devil_dynamic_config],
+                              config_files=config_files)
diff --git a/src/build/android/devil_chromium.pydeps b/src/build/android/devil_chromium.pydeps
new file mode 100644
index 0000000..4143805
--- /dev/null
+++ b/src/build/android/devil_chromium.pydeps
@@ -0,0 +1,39 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/devil_chromium.pydeps build/android/devil_chromium.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/six/six.py
+../gn_helpers.py
+devil_chromium.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
diff --git a/src/build/android/diff_resource_sizes.py b/src/build/android/diff_resource_sizes.py
new file mode 100755
index 0000000..eefb6cd
--- /dev/null
+++ b/src/build/android/diff_resource_sizes.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs resource_sizes.py on two apks and outputs the diff."""
+
+from __future__ import print_function
+
+import argparse
+import json
+import logging
+import os
+import subprocess
+import sys
+
+from pylib.constants import host_paths
+from pylib.utils import shared_preference_utils
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import perf_tests_results_helper # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+  from tracing.value import convert_chart_json # pylint: disable=import-error
+
+_ANDROID_DIR = os.path.dirname(os.path.abspath(__file__))
+with host_paths.SysPath(os.path.join(_ANDROID_DIR, 'gyp')):
+  from util import build_utils  # pylint: disable=import-error
+
+
+_BASE_CHART = {
+    'format_version': '0.1',
+    'benchmark_name': 'resource_sizes_diff',
+    'benchmark_description': 'APK resource size diff information',
+    'trace_rerun_options': [],
+    'charts': {},
+}
+
+_CHARTJSON_FILENAME = 'results-chart.json'
+_HISTOGRAMS_FILENAME = 'perf_results.json'
+
+
+def DiffResults(chartjson, base_results, diff_results):
+  """Reports the diff between the two given results.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in, or None
+        to only print results.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title, trace_title,
+          diff_results['charts'][graph_title][trace_title]['value']
+              - trace['value'],
+          trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def AddIntermediateResults(chartjson, base_results, diff_results):
+  """Copies the intermediate size results into the output chartjson.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_base_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+  # Both base_results and diff_results should have the same charts/traces, but
+  # loop over them separately in case they don't.
+  for graph_title, graph in diff_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_diff_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def _CreateArgparser():
+  def chromium_path(arg):
+    if arg.startswith('//'):
+      return os.path.join(host_paths.DIR_SOURCE_ROOT, arg[2:])
+    return arg
+
+  argparser = argparse.ArgumentParser(
+      description='Diff resource sizes of two APKs. Arguments not listed here '
+                  'will be passed on to both invocations of resource_sizes.py.')
+  argparser.add_argument('--chromium-output-directory-base',
+                         dest='out_dir_base',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the base '
+                              'APK, i.e. what the size increase/decrease will '
+                              'be measured from.')
+  argparser.add_argument('--chromium-output-directory-diff',
+                         dest='out_dir_diff',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the diff '
+                              'APK.')
+  argparser.add_argument('--chartjson',
+                         action='store_true',
+                         help='DEPRECATED. Use --output-format=chartjson '
+                              'instead.')
+  argparser.add_argument('--output-format',
+                         choices=['chartjson', 'histograms'],
+                         help='Output the results to a file in the given '
+                              'format instead of printing the results.')
+  argparser.add_argument('--include-intermediate-results',
+                         action='store_true',
+                         help='Include the results from the resource_sizes.py '
+                              'runs in the chartjson output.')
+  argparser.add_argument('--output-dir',
+                         default='.',
+                         type=chromium_path,
+                         help='Directory to save chartjson to.')
+  argparser.add_argument('--base-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the base APK, i.e. what the size '
+                              'increase/decrease will be measured from.')
+  argparser.add_argument('--diff-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the diff APK, i.e. the APK whose size '
+                              'increase/decrease will be measured against the '
+                              'base APK.')
+  return argparser
+
+
+def main():
+  args, unknown_args = _CreateArgparser().parse_known_args()
+  # TODO(bsheedy): Remove this once all uses of --chartjson are removed.
+  if args.chartjson:
+    args.output_format = 'chartjson'
+
+  chartjson = _BASE_CHART.copy() if args.output_format else None
+
+  with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir:
+    # Run resource_sizes.py on the two APKs
+    resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py')
+    shared_args = (['python', resource_sizes_path, '--output-format=chartjson']
+                   + unknown_args)
+
+    base_args = shared_args + ['--output-dir', base_dir, args.base_apk]
+    if args.out_dir_base:
+      base_args += ['--chromium-output-directory', args.out_dir_base]
+    try:
+      subprocess.check_output(base_args, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise
+
+    diff_args = shared_args + ['--output-dir', diff_dir, args.diff_apk]
+    if args.out_dir_diff:
+      diff_args += ['--chromium-output-directory', args.out_dir_diff]
+    try:
+      subprocess.check_output(diff_args, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise
+
+    # Combine the separate results
+    base_file = os.path.join(base_dir, _CHARTJSON_FILENAME)
+    diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME)
+    base_results = shared_preference_utils.ExtractSettingsFromJson(base_file)
+    diff_results = shared_preference_utils.ExtractSettingsFromJson(diff_file)
+    DiffResults(chartjson, base_results, diff_results)
+    if args.include_intermediate_results:
+      AddIntermediateResults(chartjson, base_results, diff_results)
+
+    if args.output_format:
+      chartjson_path = os.path.join(os.path.abspath(args.output_dir),
+                                    _CHARTJSON_FILENAME)
+      logging.critical('Dumping diff chartjson to %s', chartjson_path)
+      with open(chartjson_path, 'w') as outfile:
+        json.dump(chartjson, outfile)
+
+      if args.output_format == 'histograms':
+        histogram_result = convert_chart_json.ConvertChartJson(chartjson_path)
+        if histogram_result.returncode != 0:
+          logging.error('chartjson conversion failed with error: %s',
+              histogram_result.stdout)
+          return 1
+
+        histogram_path = os.path.join(os.path.abspath(args.output_dir),
+                                      _HISTOGRAMS_FILENAME)
+        logging.critical('Dumping diff histograms to %s', histogram_path)
+        with open(histogram_path, 'w') as json_file:
+          json_file.write(histogram_result.stdout)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/docs/README.md b/src/build/android/docs/README.md
new file mode 100644
index 0000000..6392f7d
--- /dev/null
+++ b/src/build/android/docs/README.md
@@ -0,0 +1,13 @@
+# Android Build Docs
+
+* [android_app_bundles.md](android_app_bundles.md)
+* [build_config.md](build_config.md)
+* [coverage.md](coverage.md)
+* [java_toolchain.md](java_toolchain.md)
+* [java_optimization.md](java_optimization.md)
+* [lint.md](lint.md)
+* [life_of_a_resource.md](life_of_a_resource.md)
+* [../incremental_install/README.md](../incremental_install/README.md)
+
+See also:
+* [//build/README.md](../../README.md)
diff --git a/src/build/android/docs/android_app_bundles.md b/src/build/android/docs/android_app_bundles.md
new file mode 100644
index 0000000..e71fe27
--- /dev/null
+++ b/src/build/android/docs/android_app_bundles.md
@@ -0,0 +1,205 @@
+# Introduction
+
+This document describes how the Chromium build system supports Android app
+bundles.
+
+[TOC]
+
+# Overview of app bundles
+
+An Android app bundle is an alternative distribution format for Android
+applications on the Google Play Store. It reduces the size of the binaries
+sent for installation to individual devices running Android L and beyond. For
+more information, see the official Android
+[documentation](https://developer.android.com/guide/app-bundle/).
+
+For the context of this document, the most important points are:
+
+  - Unlike a regular APK (e.g. `foo.apk`), the bundle (e.g. `foo.aab`) cannot
+    be installed directly on a device.
+
+  - Instead, it must be processed into a set of installable split APKs, which
+    are stored inside a special zip archive (e.g. `foo.apks`).
+
+  - The splitting can be based on various criteria: e.g. language or screen
+    density for resources, or CPU ABI for native code.
+
+  - The bundle also uses the notion of dynamic feature modules (DFMs) to
+    separate several application features. Each module has its own code, assets
+    and resources, and can be installed separately from the rest of the
+    application if needed.
+
+  - The main application itself is stored in the '`base`' module (this name
+    cannot be changed).
+
+
+# Declaring app bundles with GN templates
+
+Here's an example that shows how to declare a simple bundle that contains a
+single base module, which enables language-based splits:
+
+```gn
+
+  # First, declare the base module: the one that contains the main
+  # application's code, resources and assets.
+  android_app_bundle_module("foo_base_module") {
+    # Declarations are similar to android_apk here.
+    ...
+  }
+
+  # Second, declare the bundle itself.
+  android_app_bundle("foo_bundle") {
+    # Indicate the base module to use for this bundle
+    base_module_target = ":foo_base_module"
+
+    # The name of the bundle file (without any suffix). Defaults to
+    # 'foo_bundle' otherwise.
+    bundle_name = "FooBundle"
+
+    # Enable language-based splits for this bundle, meaning that resources
+    # and assets specific to a given language will be placed into their own
+    # split APK in the final .apks archive.
+    enable_language_splits = true
+
+    # Proguard settings must be passed at the bundle, not module, target.
+    proguard_enabled = !is_java_debug
+  }
+```
+
+When building the `foo_bundle` target with Ninja, you will end up with
+the following:
+
+  - The bundle file under `out/Release/apks/FooBundle.aab`
+
+  - A helper script called `out/Release/bin/foo_bundle`, which can be used
+    to install / launch / uninstall the bundle on local devices.
+
+    This works like an APK wrapper script (e.g. `foo_apk`). Use `--help`
+    to see all possible commands supported by the script.
+
+
+# Declaring dynamic feature modules with GN templates
+
+Please see
+[Dynamic Feature Modules](../../../docs/android_dynamic_feature_modules.md) for
+more details. In short, if you need more modules besides the base one, you
+will need to list all the extra ones using the `extra_modules` variable, which
+takes a list of GN scopes, as in:
+
+```gn
+
+  android_app_bundle_module("foo_base_module") {
+    ...
+  }
+
+  android_app_bundle_module("foo_extra_module") {
+    ...
+  }
+
+  android_app_bundle("foo_bundle") {
+    base_module_target = ":foo_base_module"
+
+    extra_modules = [
+      { # NOTE: Scopes require one field per line, and no comma separators.
+        name = "my_module"
+        module_target = ":foo_extra_module"
+      }
+    ]
+
+    ...
+  }
+```
+
+Note that each extra module is identified by a unique name, which cannot
+be '`base`'.
+
+
+# Bundle signature issues
+
+Signing an app bundle is not necessary, unless you want to upload it to the
+Play Store. Since this process is very slow (it uses `jarsigner` instead of
+the much faster `apkbuilder`), you can control it with the `sign_bundle`
+variable of the `android_app_bundle()` target.
+
+The `.apks` archive however always contains signed split APKs. The keystore
+path/password/alias being used are the default ones, unless you use custom
+values when declaring the bundle itself, as in:
+
+```gn
+  android_app_bundle("foo_bundle") {
+    ...
+    keystore_path = "//path/to/keystore"
+    keystore_password = "K3y$t0Re-Pa$$w0rd"
+    keystore_name = "my-signing-key-name"
+  }
+```
+
+These values are not stored in the bundle itself, but in the wrapper script,
+which will use them to generate the `.apks` archive for you. This allows you
+to properly install updates on top of existing applications on any device.
+
+
+# Proguard and bundles
+
+When using an app bundle that is made of several modules, it is crucial to
+ensure that proguard, if enabled:
+
+- Keeps the obfuscated class names used by each module consistent.
+- Does not remove classes that are not used in one module, but referenced
+  by others.
+
+To achieve this, a special scheme called *synchronized proguarding* is
+performed, which consists of the following steps:
+
+- The list of unoptimized .jar files from all modules is sent to a single
+  proguard command. This generates a new temporary optimized *group* .jar file.
+
+- Each module extracts the optimized class files from the optimized *group*
+  .jar file, to generate its own, module-specific, optimized .jar.
+
+- Each module-specific optimized .jar is then sent to dex generation.
+
+This synchronized proguarding step is added by the `android_app_bundle()` GN
+template. In practice this means the following:
+
+  - `proguard_enabled` must be passed to `android_app_bundle` targets, but not
+    to `android_app_bundle_module` ones.
+
+  - `proguard_configs` can be still passed to individual modules, just
+    like regular APKs. All proguard configs will be merged during the
+    synchronized proguard step.
+
+
+# Manual generation and installation of .apks archives
+
+Note that the `foo_bundle` script knows how to generate the .apks archive
+from the bundle file, and install it to local devices for you. For example,
+to install and launch a bundle, use:
+
+```sh
+  out/Release/bin/foo_bundle run
+```
+
+If you want to manually inspect or use the `.apks` archive, use the following
+command to generate it:
+
+```sh
+  out/Release/bin/foo_bundle build-bundle-apks \
+      --output-apks=/tmp/BundleFoo.apks
+```
+
+All split APKs within the archive will be properly signed, and you will be
+able to inspect its contents (with `unzip -l`) or install it manually with:
+
+```sh
+  build/android/gyp/bundletool.py install-apks \
+      --apks=/tmp/BundleFoo.apks \
+      --adb=$(which adb)
+```
+
+The task of examining the manifest is simplified by running the following,
+which dumps the application manifest as XML to stdout:
+
+```sh
+  build/android/gyp/bundletool.py dump-manifest
+```
diff --git a/src/build/android/docs/build_config.md b/src/build/android/docs/build_config.md
new file mode 100644
index 0000000..8a301c8
--- /dev/null
+++ b/src/build/android/docs/build_config.md
@@ -0,0 +1,168 @@
+# Introduction
+
+This document describes the `.build_config` files that are used by the
+Chromium build system for Android-specific targets like APK, resources,
+and more.
+
+[TOC]
+
+# I. Overview of .build_config files:
+
+The Android build requires performing computations about dependencies in
+various targets, which are not possible with the GN build language. To address
+this, `.build_config` files are written during the build to store the needed
+per-target information as JSON files.
+
+They are always written to `$target_gen_dir/${target_name}.build_config`.
+
+Many scripts under [`build/android/gyp/`](build/android/gyp/), which are used
+during the build, can also accept parameter arguments using
+`@FileArg()` references, which look like:
+
+    --some-param=@FileArg(<filename>:<key1>:<key2>:...<keyN>)
+
+This placeholder will ensure that `<filename>` is read as a JSON file, then
+return the value at `[key1][key2]...[keyN]` for the `--some-param` option.
+
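+As a rough illustration (and only an illustration, not the actual helper used
+by the build), resolving such a reference boils down to a JSON load followed
+by a key walk:
+
+```python
+# Hedged sketch: parse '@FileArg(<filename>:<key1>:...:<keyN>)' and return
+# the nested value. All names here are illustrative, not real build code.
+import json
+import re
+
+_FILE_ARG_RE = re.compile(r'^@FileArg\((.*)\)$')
+
+def resolve_file_arg(param):
+  match = _FILE_ARG_RE.match(param)
+  if not match:
+    return param  # A plain value; no lookup needed.
+  parts = match.group(1).split(':')
+  filename, keys = parts[0], parts[1:]
+  with open(filename) as f:
+    value = json.load(f)
+  for key in keys:  # Walk [key1][key2]...[keyN].
+    value = value[key]
+  return value
+```
+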
+Apart from that, the scripts do not need to know anything about the structure
+of `.build_config` files (but the GN rules that invoke them do and select
+which `@FileArg()` references to use).
+
+For a concrete example, consider the following GN fragment:
+
+```gn
+# From //ui/android/BUILD.gn:
+android_resources("ui_java_resources") {
+  custom_package = "org.chromium.ui"
+  resource_dirs = [ "java/res" ]
+  deps = [
+    ":ui_strings_grd",
+  ]
+}
+```
+
+This will end up generating the following JSON file under
+`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config`:
+
+```json
+{
+  "deps_info": {
+    "deps_configs": [
+      "gen/ui/android/ui_strings_grd.build_config"
+    ],
+    "name": "ui_java_resources.build_config",
+    "package_name": "org.chromium.ui",
+    "path": "gen/ui/android/ui_java_resources.build_config",
+    "r_text": "gen/ui/android/ui_java_resources_R.txt",
+    "resources_dirs": [
+      "../../ui/android/java/res"
+    ],
+    "resources_zip": "resource_zips/ui/android/ui_java_resources.resources.zip",
+    "srcjar": "gen/ui/android/ui_java_resources.srcjar",
+    "type": "android_resources"
+  },
+  "gradle": {},
+  "resources": {
+    "dependency_zips": [
+      "resource_zips/ui/android/ui_strings_grd.resources.zip"
+    ],
+    "extra_package_names": [],
+  }
+}
+```
+
+NOTE: All path values in `.build_config` files are relative to your
+`$CHROMIUM_OUTPUT_DIR`.
+
+# II. Generation of .build_config files:
+
+They are generated by the GN [`write_build_config()`][gn_write_build_config]
+internal template, which ends up invoking
+[`write_build_config.py`][write_build_config_py]. For our example above, the
+script is invoked with the following parameters:
+
+```
+python ../../build/android/gyp/write_build_config.py \
+    --type=android_resources \
+    --depfile gen/ui/android/ui_java_resources__build_config_crbug_908819.d \
+    --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config\"\] \
+    --build-config gen/ui/android/ui_java_resources.build_config \
+    --resources-zip resource_zips/ui/android/ui_java_resources.resources.zip \
+    --package-name org.chromium.ui \
+    --r-text gen/ui/android/ui_java_resources_R.txt \
+    --resource-dirs=\[\"../../ui/android/java/res\"\] \
+    --srcjar gen/ui/android/ui_java_resources.srcjar
+```
+
+Note that *most* of the content of the JSON file comes from command-line
+parameters, but not all of it.
+
+In particular, the `resources['dependency_zips']` entry was computed by
+inspecting the content of all dependencies (here, only
+`ui_strings_grd.build_config`) and collecting their
+`deps_info['resources_zip']` values.
+
+Because a target's `.build_config` file will always be generated after
+that of all of its dependencies,
+[`write_build_config.py`][write_build_config_py] can traverse the
+whole (transitive) set of direct *and* indirect dependencies for a given target
+and extract useful information out of it.
+
+This is the kind of processing that cannot be done at the GN language level,
+and is very powerful for Android builds.
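+
+For intuition, a minimal sketch of such a traversal (under the assumption
+that every `.build_config` follows the JSON structure shown above; this is
+not the real script) could look like:
+
+```python
+# Hedged sketch: recursively walk deps_configs and collect each dependency's
+# resources zip, mirroring how resources['dependency_zips'] is populated.
+import json
+
+def collect_dependency_zips(build_config_path, seen=None):
+  seen = set() if seen is None else seen
+  with open(build_config_path) as f:
+    deps_info = json.load(f)['deps_info']
+  zips = []
+  for dep_path in deps_info.get('deps_configs', []):
+    if dep_path in seen:
+      continue
+    seen.add(dep_path)
+    with open(dep_path) as f:
+      dep_info = json.load(f)['deps_info']
+    if 'resources_zip' in dep_info:
+      zips.append(dep_info['resources_zip'])
+    zips.extend(collect_dependency_zips(dep_path, seen))
+  return zips
+```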
+
+
+# III. Usage of .build_config files:
+
+In addition to being parsed by `write_build_config.py` when they are listed
+in the `--deps-configs` of a given target, the `.build_config` files are used
+by other scripts under [build/android/gyp/] to build various outputs.
+
+For example, the GN `android_resources` template uses it to invoke the
+`process_resources.py` script with the following command, in order to
+generate various related files (e.g. `ui_java_resources_R.txt`):
+
+```sh
+python ../../build/android/gyp/process_resources.py \
+    --depfile gen/ui/android/ui_java_resources_1.d \
+    --android-sdk-jar ../../third_party/android_sdk/public/platforms/android-29/android.jar \
+    --aapt-path ../../third_party/android_sdk/public/build-tools/29.0.2/aapt \
+    --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:dependency_zips\) \
+    --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_package_names\) \
+    --resource-dirs=\[\"../../ui/android/java/res\"\] \
+    --debuggable \
+    --resource-zip-out resource_zips/ui/android/ui_java_resources.resources.zip \
+    --r-text-out gen/ui/android/ui_java_resources_R.txt \
+    --srcjar-out gen/ui/android/ui_java_resources.srcjar \
+    --non-constant-id \
+    --custom-package org.chromium.ui \
+    --shared-resources
+```
+
+Note the use of `@FileArg()` references here, to tell the script where to find
+the information it needs.
+
+
+# IV. Format of .build_config files:
+
+Thanks to `@FileArg()` references, Python build scripts under
+[`build/android/gyp/`](build/android/gyp/) do not need to know anything
+about the internal format of `.build_config` files.
+
+This format is decided between the internal GN build rules and
+[`write_build_config.py`][write_build_config_py]. Since it changes rather
+often, the format documentation is kept inside the Python script itself, but
+can be extracted as a Markdown file and visualized with the following commands:
+
+```sh
+# Extract .build_config format documentation
+build/android/gyp/write_build_config.py \
+  --generate-markdown-format-doc > /tmp/format.md
+
+# Launch a browser to visualize the format documentation.
+python tools/md_browser/md_browser.py -d /tmp /tmp/format.md
+```
+
+[build/android/gyp/]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/
+[gn_write_build_config]: https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?q=write_build_config&sq=package:chromium
+[write_build_config_py]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/write_build_config.py
diff --git a/src/build/android/docs/class_verification_failures.md b/src/build/android/docs/class_verification_failures.md
new file mode 100644
index 0000000..e3e4745
--- /dev/null
+++ b/src/build/android/docs/class_verification_failures.md
@@ -0,0 +1,286 @@
+# Class Verification Failures
+
+[TOC]
+
+## What's this all about?
+
+This document aims to explain class verification on Android, how this can affect
+app performance, how to identify problems, and chromium-specific solutions. For
+simplicity, this document focuses on how class verification is implemented by
+ART, the virtual machine which replaced Dalvik starting in Android Lollipop.
+
+## What is class verification?
+
+The Java language requires any virtual machine to _verify_ the class files it
+loads and executes. Generally, verification is extra work the virtual machine is
+responsible for doing, on top of the work of loading the class and performing
+[class initialization][1].
+
+A class may fail verification for a wide variety of reasons, but in practice
+it's usually because the class's code refers to unknown classes or methods. An
+example case might look like:
+
+```java
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return mWindow.isWideColorGamut();
+        }
+        return false;
+    }
+}
+```
+
+### Why does that fail?
+
+In this example, `WindowHelper` is a helper class intended to help callers
+figure out wide color gamut support, even on pre-OMR1 devices. However, this
+class will fail class verification on pre-OMR1 devices, because it refers to
+[`Window#isWideColorGamut()`][2] (new-in-OMR1), which appears to be an undefined
+method.
+
+### Huh? But we have an SDK check!
+
+SDK checks are completely irrelevant for class verification. Although readers
+can see we'll never call the new-in-OMR1 API unless we're on >= OMR1 devices,
+the Oreo version of ART doesn't know `isWideColorGamut()` was added in next
+year's release. From ART's perspective, we may as well be calling
+`methodWhichDoesNotExist()`, which would clearly be unsafe.
+
+All the SDK check does is protect us from crashing at runtime if we call this
+method on Oreo or below.
+
+### Class verification on ART
+
+While the above is a mostly general description of class verification, it's
+important to understand how the Android runtime handles this.
+
+Since class verification is extra work, ART has an optimization called **AOT
+("ahead-of-time") verification**¹. Immediately after installing an app, ART will
+scan the dex files and verify as many classes as it can. If a class fails
+verification, this is usually a "soft failure" (hard failures are uncommon), and
+ART marks the class with the status `RetryVerificationAtRuntime`.
+
+`RetryVerificationAtRuntime`, as the name suggests, means ART must try again to
+verify the class at runtime. ART does so the first time you access the class
+(right before class initialization/`<clinit>()` method). However, depending on
+the class, this verification step can be very expensive (we've observed cases
+which take [several milliseconds][3]). Since apps tend to initialize most of
+their classes during startup, verification significantly increases startup time.
+
+Another minor cost to failing class verification is that ART cannot optimize
+classes which fail verification, so **all** methods in the class will perform
+slower at runtime, even after the verification step.
+
+*** aside
+¹ AOT _verification_ should not be confused with AOT _compilation_ (another ART
+feature). Unlike compilation, AOT verification happens during install time for
+every application, whereas recent versions of ART aim to apply AOT compilation
+selectively to optimize space.
+***
+
+## Chromium's solution
+
+In Chromium, we try to avoid doing class verification at runtime by
+manually out-of-lining all Android API usage like so:
+
+```java
+public class ApiHelperForOMR1 {
+    public static boolean isWideColorGamut(Window window) {
+        return window.isWideColorGamut();
+    }
+}
+
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return ApiHelperForOMR1.isWideColorGamut(mWindow);
+        }
+        return false;
+    }
+}
+```
+
+This pushes the class verification failure out of `WindowHelper` and into the
+new `ApiHelperForOMR1` class. There's no magic here: `ApiHelperForOMR1` will
+fail class verification on Oreo and below, for the same reason `WindowHelper`
+did previously.
+
+The key is that, while `WindowHelper` is used on all API levels, it only calls
+into `ApiHelperForOMR1` on OMR1 and above. Because we never use
+`ApiHelperForOMR1` on Oreo and below, we never load and initialize the class,
+and thanks to ART's lazy runtime class verification, we never actually retry
+verification. **Note:** `list_class_verification_failures.py` will still list
+`ApiHelperFor*` classes in its output, although these don't cause performance
+issues.
+
+### Creating ApiHelperFor\* classes
+
+There are several examples throughout the code base, but such classes should
+look as follows:
+
+```java
+/**
+ * Utility class to use new APIs that were added in O_MR1 (API level 27).
+ * These need to exist in a separate class so that Android framework can successfully verify
+ * classes without encountering the new APIs.
+ */
+@VerifiesOnOMR1
+@TargetApi(Build.VERSION_CODES.O_MR1)
+public class ApiHelperForOMR1 {
+    private ApiHelperForOMR1() {}
+
+    // ...
+}
+```
+
+* `@VerifiesOnOMR1`: this is a chromium-defined annotation to tell proguard
+  (and similar tools) not to inline this class or its methods (since that would
+  defeat the point of out-of-lining!)
+* `@TargetApi(Build.VERSION_CODES.O_MR1)`: this tells Android Lint it's OK to
+  use OMR1 APIs since this class is only used on OMR1 and above. Substitute
+  `O_MR1` for the [appropriate constant][4], depending when the APIs were
+  introduced.
+* Don't put any `SDK_INT` checks inside this class, because it must only be
+  called on >= OMR1.
+
+### Out-of-lining if your method has a new type in its signature
+
+Sometimes you'll run into a situation where a class **needs** to have a method
+which either accepts a parameter which is a new type or returns a new type
+(e.g., externally-facing code, such as WebView's glue layer). Even though it's
+impossible to write such a class without referring to the new type, it's still
+possible to avoid failing class verification. ART has a useful optimization: if
+your class only moves a value between registers (i.e., it doesn't call any
+methods or fields on the value), then ART will not check for the existence of
+that value's type. This means you can write your class like so:
+
+```java
+public class FooBar {
+    // FooBar needs to have the getNewTypeInAndroidP method, but it would be
+    // expensive to fail verification. This method will only be called on >= P
+    // but other methods on the class will be used on lower OS versions (and
+    // also can't be factored into another class).
+    public NewTypeInAndroidP getNewTypeInAndroidP() {
+        assert Build.VERSION.SDK_INT >= Build.VERSION_CODES.P;
+        // Stores a NewTypeInAndroidP in the return register, but doesn't do
+        // anything else with it
+        return ApiHelperForP.getNewTypeInAndroidP();
+    }
+
+    // ...
+}
+
+@VerifiesOnP
+@TargetApi(Build.VERSION_CODES.P)
+public class ApiHelperForP {
+    public static NewTypeInAndroidP getNewTypeInAndroidP() {
+        return new NewTypeInAndroidP();
+    }
+
+    // ...
+}
+```
+
+**Note:** this only works in ART (L+), not Dalvik (KitKat and earlier).
+
+## Investigating class verification failures
+
+Class verification is generally surprising and nonintuitive. Fortunately, the
+ART team has provided tools to investigate errors (and the chromium team has
+built helpful wrappers).
+
+### Listing failing classes
+
+The main starting point is to figure out which classes fail verification (those
+which ART marks as `RetryVerificationAtRuntime`). This can be done for **any
+Android app** (it doesn't have to be from the chromium project) like so:
+
+```shell
+# Install the app first. Using Chrome as an example.
+autoninja -C out/Default chrome_public_apk
+out/Default/bin/chrome_public_apk install
+
+# List all classes marked as 'RetryVerificationAtRuntime'
+build/android/list_class_verification_failures.py --package="org.chromium.chrome"
+W    0.000s Main  Skipping deobfuscation because no map file was provided.
+first.failing.Class
+second.failing.Class
+...
+```
+
+"Skipping deobfuscation because no map file was provided" is a warning, since
+many Android applications (including Chrome's release builds) are built with
+proguard (or similar tools) to obfuscate Java classes and shrink code. Although
+it's safe to ignore this warning if you don't obfuscate Java code, the script
+knows how to deobfuscate classes for you (useful for `is_debug = false` or
+`is_java_debug = false`):
+
+```shell
+build/android/list_class_verification_failures.py --package="org.chromium.chrome" \
+  --mapping=<path/to/file.mapping> # ex. out/Release/apks/ChromePublic.apk.mapping
+android.support.design.widget.AppBarLayout
+android.support.design.widget.TextInputLayout
+...
+```
+
+Googlers can also download mappings for [official
+builds](http://go/webview-official-builds).
+
+### Understanding the reason for the failure
+
+The ART team also provides tooling for this. You can configure ART on a
+rooted device to log all class verification failures (during installation),
+at which point the cause is much clearer:
+
+```shell
+# Enable ART logging (requires root). Note the 2 pairs of quotes!
+adb root
+adb shell setprop dalvik.vm.dex2oat-flags '"--runtime-arg -verbose:verifier"'
+
+# Restart Android services to pick up the settings
+adb shell stop && adb shell start
+
+# Optional: clear logs which aren't relevant
+adb logcat -c
+
+# Install the app and check for ART logs
+adb install -d -r out/Default/apks/ChromePublic.apk
+adb logcat | grep 'dex2oat'
+...
+... I dex2oat : Soft verification failures in boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu)
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xF0] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xFA] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+...
+```
+
+*** note
+**Note:** you may want to avoid `adb` wrapper scripts (ex.
+`out/Default/bin/chrome_public_apk install`). These scripts cache the package
+manager state to optimize away idempotent installs. However in this case, we
+**do** want to trigger idempotent installs, because we want to re-trigger AOT
+verification.
+***
+
+In the above example, `SelectionPopupControllerImpl` fails verification on Oreo
+(API 26) because it refers to [`TextClassification.getActions()`][5], which was
+added in Pie (API 28). If `SelectionPopupControllerImpl` is used on pre-Pie
+devices, then `TextClassification.getActions()` must be out-of-lined.
+
+## See also
+
+* Bugs or questions? Contact ntfschr@chromium.org
+* ART team's Google I/O talks: [2014](https://youtu.be/EBlTzQsUoOw) and later
+  years
+* Analysis of class verification in Chrome and WebView (Google-only
+  [doc](http://go/class-verification-chromium-analysis))
+* Presentation on class verification in Chrome and WebView (Google-only
+  [slide deck](http://go/class-verification-chromium-slides))
+
+[1]: https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-5.html#jvms-5.5
+[2]: https://developer.android.com/reference/android/view/Window.html#isWideColorGamut()
+[3]: https://bugs.chromium.org/p/chromium/issues/detail?id=838702
+[4]: https://developer.android.com/reference/android/os/Build.VERSION_CODES
+[5]: https://developer.android.com/reference/android/view/textclassifier/TextClassification.html#getActions()
diff --git a/src/build/android/docs/coverage.md b/src/build/android/docs/coverage.md
new file mode 100644
index 0000000..17c83c6
--- /dev/null
+++ b/src/build/android/docs/coverage.md
@@ -0,0 +1,73 @@
+# Android code coverage instructions
+
+These are instructions for collecting code coverage data for Android
+instrumentation and JUnit tests.
+
+[TOC]
+
+## How JaCoCo coverage works
+
+In order to use JaCoCo code coverage, we need to create build-time pre-instrumented
+class files and runtime **.exec** files. Then we need to process them using the
+**build/android/generate_jacoco_report.py** script.
+
+## How to collect coverage data
+
+1. Use the following GN build arguments:
+
+  ```gn
+  target_os = "android"
+  use_jacoco_coverage = true
+  ```
+
+   Now when building, pre-instrumented files will be created in the build directory.
+
+2. Run tests, with option `--coverage-dir <directory>`, to specify where to save
+   the .exec file. For example, you can run chrome JUnit tests:
+   `out/Debug/bin/run_chrome_junit_tests --coverage-dir /tmp/coverage`.
+
+3. The coverage results of JUnit and instrumentation tests will be merged
+   automatically if they are in the same directory.
+
+## How to generate coverage report
+
+1. Now that the .exec files have been generated, we can create a JaCoCo HTML/XML/CSV
+   report using `generate_jacoco_report.py`, for example:
+
+  ```shell
+  build/android/generate_jacoco_report.py \
+     --format html \
+     --output-dir /tmp/coverage_report/ \
+     --coverage-dir /tmp/coverage/ \
+     --sources-json-dir out/Debug/
+  ```
+   Then an index.html containing coverage info will be created in the output directory:
+
+  ```
+  [INFO] Loading execution data file /tmp/coverage/testTitle.exec.
+  [INFO] Loading execution data file /tmp/coverage/testSelected.exec.
+  [INFO] Loading execution data file /tmp/coverage/testClickToSelect.exec.
+  [INFO] Loading execution data file /tmp/coverage/testClickToClose.exec.
+  [INFO] Loading execution data file /tmp/coverage/testThumbnail.exec.
+  [INFO] Analyzing 58 classes.
+  ```
+
+2. For XML and CSV reports, we need to specify `--output-file` instead of `--output-dir` since
+   only a single file is generated as the XML or CSV report.
+  ```shell
+  build/android/generate_jacoco_report.py \
+    --format xml \
+    --output-file /tmp/coverage_report/report.xml \
+    --coverage-dir /tmp/coverage/ \
+    --sources-json-dir out/Debug/
+  ```
+
+   or
+
+  ```shell
+  build/android/generate_jacoco_report.py \
+    --format csv \
+    --output-file /tmp/coverage_report/report.csv \
+    --coverage-dir /tmp/coverage/ \
+    --sources-json-dir out/Debug/
+  ```
diff --git a/src/build/android/docs/java_optimization.md b/src/build/android/docs/java_optimization.md
new file mode 100644
index 0000000..0ba0d50
--- /dev/null
+++ b/src/build/android/docs/java_optimization.md
@@ -0,0 +1,149 @@
+# Optimizing Java Code
+
+This doc describes how Java code is optimized in Chrome on Android and how to
+deal with issues caused by the optimizer. For tips on how to write optimized
+code, see [//docs/speed/binary_size/optimization_advice.md#optimizing-java-code](/docs/speed/binary_size/optimization_advice.md#optimizing-java-code).
+
+[TOC]
+
+## ProGuard vs R8
+
+ProGuard is the original open-source tool used by many Android applications to
+perform whole-program bytecode optimization. [R8](https://r8.googlesource.com/r8)
+is a re-implementation that is used by Chrome (and is the default for Android
+Studio).
+The terms "ProGuard" and "R8" are used interchangeably within Chromium but
+generally they're meant to refer to the tool providing Java code optimizations.
+
+## What does ProGuard do?
+
+1. Shrinking: ProGuard will remove unused code. This is especially useful
+   when depending on third party libraries where only a few functions are used.
+
+2. Obfuscation: ProGuard will rename classes/fields/methods to use shorter
+   names. Obfuscation is used for minification purposes only (not security).
+
+3. Optimization: ProGuard performs a series of optimizations to shrink code
+   further through various approaches (ex. inlining, outlining, class merging,
+   etc).
+
+## Build Process
+
+ProGuard is enabled only for release builds of Chrome because it is a slow build
+step and breaks Java debugging. It can also be enabled manually via the GN arg:
+```is_java_debug = false```
+
+### ProGuard configuration files
+
+Most GN Java targets can specify ProGuard configuration files by setting the
+`proguard_configs` variable. [//base/android/proguard](/base/android/proguard)
+contains common flags shared by most Chrome applications.
+
+### GN build rules
+
+When `is_java_debug = false` and a target has enabled ProGuard, the `proguard`
+step generates the `.dex` files for the application. The `proguard` step takes
+as input a list of `.jar` files, runs R8/ProGuard on those `.jar` files, and
+produces the final `.dex` file(s) that will be packaged into your `.apk`.
+
+## Deobfuscation
+
+Obfuscation can be turned off for local builds while leaving ProGuard enabled
+by setting `enable_proguard_obfuscation = false` in GN args.
+
+There are two main methods for deobfuscating Java stack traces locally:
+1. Using APK wrapper scripts (stacks are automatically deobfuscated)
+  * `$OUT/bin/chrome_public_apk logcat`  # Run adb logcat
+  * `$OUT/bin/chrome_public_apk run`  # Launch chrome and run adb logcat
+
+2. Using `java_deobfuscate`
+  * `build/android/stacktrace/java_deobfuscate.py $OUT/apks/ChromePublic.apk.mapping < logcat.txt`
+    * ProGuard mapping files are located beside APKs (ex.
+      `$OUT/apks/ChromePublic.apk` and `$OUT/apks/ChromePublic.apk.mapping`)
+
+Helpful links for deobfuscation:
+
+* [Internal bits about how mapping files are archived][proguard-site]
+* [More detailed deobfuscation instructions][proguard-doc]
+* [Script for deobfuscating official builds][deob-official]
+
+[proguard-site]: http://goto.google.com/chrome-android-proguard
+[proguard-doc]: http://goto.google.com/chromejavadeobfuscation
+[deob-official]: http://goto.google.com/chrome-android-official-deobfuscation
+
+## Debugging common failures
+
+ProGuard failures are often hard to debug. This section aims to outline some of
+the more common errors.
+
+### Classes expected to be discarded
+
+The `-checkdiscard` directive can be used to ensure that certain items are
+removed by ProGuard. A common use of `-checkdiscard` is to ensure that ProGuard
+optimizations do not regress in their ability to remove code, such as code
+intended only for debug builds, or generated JNI classes that are meant to be
+zero-overhead abstractions. Annotating a class with
+[@CheckDiscard][checkdiscard] will add a `-checkdiscard` rule automatically.
+
+[checkdiscard]: /base/android/java/src/org/chromium/base/annotations/CheckDiscard.java
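+
+For example, a hedged sketch of the annotation in use (the class and reason
+string are hypothetical):
+
+```java
+@CheckDiscard("Debug-only helper; expected to be stripped from release builds")
+class DebugOnlyHelper {
+    static void dumpState() {}
+}
+```
+
+A failed discard check produces output like this: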
+
+```
+Item void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>() was not discarded.
+void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>()
+|- is invoked from:
+|  void org.chromium.base.library_loader.LibraryPrefetcher.asyncPrefetchLibrariesToMemory()
+... more code path lines
+|- is referenced in keep rule:
+|  obj/chrome/android/chrome_public_apk/chrome_public_apk.resources.proguard.txt:104:1
+
+Error: Discard checks failed.
+```
+
+Things to check:
+  * Did you add code that is referenced by the code path in the error message?
+  * If so, check why `@CheckDiscard` was originally added to the class and
+    verify that the reason is still valid with your change (`git blame` may
+    help here).
+  * Try the extra debugging steps listed in the JNI section below.
+
+### JNI wrapper classes not discarded
+
+Proxy native methods (`@NativeMethods`) use generated wrapper classes to provide
+access to native methods. We rely on ProGuard to fully optimize the generated
+code so that native methods aren't a source of binary size bloat. The error
+message above is an example of a JNI wrapper class that wasn't discarded (note
+the name of the offending class).
+  * The ProGuard rule pointed to in the error message isn't helpful (just tells
+    us a code path that reaches the not-inlined class).
+  * Common causes:
+    * Caching the result of `ClassNameJni.get()` in a member variable
+      (sketched after this list).
+    * Passing a native wrapper method reference instead of using a lambda (i.e.
+      `Jni.get()::methodName` vs. `() -> Jni.get().methodName()`).
+  * For more debugging info, add to `base/android/proguard/chromium_code.flags`:
+      ```
+      -whyareyounotinlining class org.chromium.base.library_loader.LibraryPrefetcherJni {
+          <init>();
+      }
+      ```
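+
+A hedged sketch of the caching anti-pattern (`ExampleBridgeJni` stands in for
+a generated `@NativeMethods` wrapper; all names are hypothetical):
+
+```java
+class ExampleBridge {
+    // Bad: caching the wrapper in a field keeps the generated class alive
+    // and defeats R8's ability to inline and discard it.
+    // private final ExampleBridge.Natives mNatives = ExampleBridgeJni.get();
+
+    // Good: fetch the wrapper at each call site so `get()` can be inlined.
+    void doNativeWork() {
+        ExampleBridgeJni.get().doWork();
+    }
+}
+```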
+
+### Duplicate classes
+
+```
+Type YourClassName is defined multiple times: obj/jar1.jar:YourClassName.class, obj/jar2.jar:YourClassName.class
+```
+
+Common causes:
+  * Multiple targets with overlapping `srcjar_deps`:
+    * Each `.srcjar` can only be depended on by a single Java target in any
+      given APK target. `srcjar_deps` are just a convenient way to depend on
+      generated files and should be treated like source files rather than
+      `deps`.
+    * Solution: Wrap the `srcjar` in an `android_library` target or have only a
+      single Java target depend on the `srcjar` and have other targets depend on
+      the containing Java target instead.
+  * Accidentally enabling APK level generated files for multiple targets that
+    share generated code (ex. Trichrome or App Bundles):
+    * Solution: Make sure the generated file is only added once.
+
+Debugging ProGuard failures isn't easy, so please message java@chromium.org
+or [file a bug](https://crbug.com/new) with `component=Build os=Android` for any
+issues related to Java code optimization.
diff --git a/src/build/android/docs/java_toolchain.md b/src/build/android/docs/java_toolchain.md
new file mode 100644
index 0000000..ef11548
--- /dev/null
+++ b/src/build/android/docs/java_toolchain.md
@@ -0,0 +1,284 @@
+# Chromium's Java Toolchain
+
+This doc aims to describe the Chrome build process that takes a set of `.java`
+files and turns them into a `classes.dex` file.
+
+[TOC]
+
+## Core GN Target Types
+
+The following have `supports_android` and `requires_android` set to false by
+default:
+* `java_library()`: Compiles `.java` -> `.jar`
+* `java_prebuilt()`:  Imports a prebuilt `.jar` file.
+
+The following have `supports_android` and `requires_android` set to true. They
+also have a default `jar_excluded_patterns` set (more on that later):
+* `android_library()`
+* `android_java_prebuilt()`
+
+All target names must end with "_java" so that the build system can distinguish
+them from non-java targets (or [other variations](https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?rcl=ec2c17d7b4e424e060c3c7972842af87343526a1&l=20)).
+
+Most targets produce two separate `.jar` files:
+* Device `.jar`: Used to produce `.dex.jar`, which is used on-device.
+* Host `.jar`: For use on the host machine (`junit_binary` / `java_binary`).
+  * Host `.jar` files live in `lib.java/` so that they are archived on
+    builder/tester bots (which do not archive `obj/`).
+
+## From Source to Final Dex
+
+### Step 1: Create interface .jar with turbine or ijar
+
+For prebuilt `.jar` files, [//third_party/ijar] is used to create an interface
+`.jar` from the prebuilt `.jar`.
+
+For non-prebuilt targets, [//third_party/turbine] is used to create an interface
+`.jar` from `.java` source files. Turbine is much faster than javac, and so
+enables full compilation to happen with more concurrency.
+
+What are interface jars?
+
+* They contain `.class` files with all non-public symbols and function bodies
+  removed.
+* Dependent targets use interface `.jar` files to skip having to be rebuilt
+  when only private implementation details change (see the illustration below).
+
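+A hedged illustration: given this source class...
+
+```java
+public class Example {
+    public int publicApi() {
+        return helper() + 1;
+    }
+
+    private int helper() {
+        return 42;
+    }
+}
+```
+
+...the interface `.jar` keeps only the bodiless signature of `publicApi()`;
+method bodies and the private `helper()` are stripped, so dependents need to
+recompile only when the public shape changes.
+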
+[//third_party/ijar]: /third_party/ijar/README.chromium
+[//third_party/turbine]: /third_party/turbine/README.chromium
+
+### Step 2a: Compile with javac
+
+This step is the only step that does not apply to prebuilt targets.
+
+* All `.java` files in a target are compiled by `javac` into `.class` files.
+  * This includes `.java` files that live within `.srcjar` files, referenced
+    through `srcjar_deps`.
+* The `classpath` used when compiling a target is composed of the `.jar` files
+  of its deps.
+  * When deps are library targets, the Step 1 `.jar` file is used.
+  * When deps are prebuilt targets, the original `.jar` file is used.
+  * All `.jar` processing done in subsequent steps does not impact compilation
+    classpath.
+* `.class` files are zipped into an output `.jar` file.
+* There is **no support** for incremental compilation at this level.
+  * If one source file changes within a library, then the entire library is
+    recompiled.
+  * Prefer smaller targets to avoid slow compiles.
+
+### Step 2b: Compile with ErrorProne
+
+This step can be disabled via GN arg: `use_errorprone_java_compiler = false`
+
+* Concurrently with step 2a: [ErrorProne] compiles java files and checks for
+  bug patterns, including some [custom to Chromium][ep_plugins].
+* ErrorProne used to replace step 2a, but was changed to a concurrent step
+  after being identified as the slower of the two.
+
+[ErrorProne]: https://errorprone.info/
+[ep_plugins]: /tools/android/errorprone_plugin/
+
+### Step 3: Desugaring (Device .jar Only)
+
+This step happens only when targets have `supports_android = true`. It is not
+applied to `.jar` files used by `junit_binary`.
+
+* `//third_party/bazel/desugar` converts certain Java 8 constructs, such as
+  lambdas and default interface methods, into constructs that are compatible
+  with Java 7 (sketched below).
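+
+A hedged sketch of the transformation (the synthetic class name is
+illustrative; the real rewrite happens at the bytecode level):
+
+```java
+class DesugarExample {
+    static void doWork() {}
+
+    // Java 8 source:
+    static final Runnable LAMBDA = () -> doWork();
+
+    // Roughly what desugaring produces for pre-Java-8 runtimes:
+    static final class SyntheticLambda implements Runnable {
+        @Override
+        public void run() {
+            doWork();
+        }
+    }
+}
+```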
+
+### Step 4: Instrumenting (Device .jar Only)
+
+This step happens only when this GN arg is set: `use_jacoco_coverage = true`
+
+* [Jacoco] adds instrumentation hooks to methods.
+
+[Jacoco]: https://www.eclemma.org/jacoco/
+
+### Step 5: Filtering
+
+This step happens only for targets that have `jar_excluded_patterns` or
+`jar_included_patterns` set (e.g. all `android_` targets).
+
+* Remove `.class` files that match the filters from the `.jar`. These `.class`
+  files are generally those that are re-created with different implementations
+  further on in the build process.
+  * E.g.: `R.class` files - a part of [Android Resources].
+  * E.g.: `GEN_JNI.class` - a part of our [JNI] glue.
+  * E.g.: `AppHooksImpl.class` - how `chrome_java` wires up different
+    implementations for [non-public builds][apphooks].
+
+[JNI]: /base/android/jni_generator/README.md
+[Android Resources]: life_of_a_resource.md
+[apphooks]: /chrome/android/java/src/org/chromium/chrome/browser/AppHooksImpl.java
+
+### Step 6: Per-Library Dexing
+
+This step happens only when targets have `supports_android = true`.
+
+* [d8] converts `.jar` files containing `.class` files into `.dex.jar` files
+  containing `classes.dex` files.
+* Dexing is incremental - it will reuse dex'ed classes from a previous build if
+  the corresponding `.class` file is unchanged.
+* These per-library `.dex.jar` files are used directly by [incremental install],
+  and are inputs to the Apk step when `enable_proguard = false`.
+  * Even when `is_java_debug = false`, many apk targets do not enable ProGuard
+    (e.g. unit tests).
+
+[d8]: https://developer.android.com/studio/command-line/d8
+[incremental install]: /build/android/incremental_install/README.md
+
+### Step 7: Apk / Bundle Module Compile
+
+* Each `android_apk` and `android_bundle_module` template has a nested
+  `java_library` target. The nested library includes final copies of files
+  stripped out by prior filtering steps. These files include:
+  * Final `R.java` files, created by `compile_resources.py`.
+  * Final `GEN_JNI.java` for [JNI glue].
+  * `BuildConfig.java` and `NativeLibraries.java` (//base dependencies).
+
+[JNI glue]: /base/android/jni_generator/README.md
+
+### Step 8: Final Dexing
+
+This step is skipped when building using [Incremental Install].
+
+When `is_java_debug = true`:
+* [d8] merges all library `.dex.jar` files into a final `.mergeddex.jar`.
+
+When `is_java_debug = false`:
+* [R8] performs whole-program optimization on all library `lib.java` `.jar`
+  files and outputs a final `.r8dex.jar`.
+  * For App Bundles, R8 creates a `.r8dex.jar` for each module.
+
+[Incremental Install]: /build/android/incremental_install/README.md
+[R8]: https://r8.googlesource.com/r8
+
+## Test APKs with apk_under_test
+
+Test APKs are normal APKs that contain an `<instrumentation>` tag within their
+`AndroidManifest.xml`. If this tag specifies an `android:targetPackage`
+different from itself, then Android will add that package's `classes.dex` to the
+test APK's Java classpath when run. In GN, you can enable this behavior using
+the `apk_under_test` parameter on `instrumentation_test_apk` targets. Using it
+is discouraged if APKs have `proguard_enabled=true`.
+
+### Difference in Final Dex
+
+When `enable_proguard=false`:
+* Any library depended on by the test APK that is also depended on by the
+  apk-under-test is excluded from the test APK's final dex step.
+
+When `enable_proguard=true`:
+* Test APKs cannot make use of the apk-under-test's dex because only symbols
+  explicitly kept by `-keep` directives are guaranteed to exist after
+  ProGuarding. As a work-around, test APKs include all of the apk-under-test's
+  libraries directly in their own final dex, such that the under-test apk's Java
+  code is never used (because it is entirely shadowed by the test apk's dex).
+  * We've found this configuration to be fragile, and are trying to [move away
+    from it](https://bugs.chromium.org/p/chromium/issues/detail?id=890452).
+
+### Difference in GEN_JNI.java
+* Calling native methods using [JNI glue] requires that a `GEN_JNI.java` class
+  be generated that contains all native methods for an APK. There cannot be
+  conflicting `GEN_JNI` classes in both the test apk and the apk-under-test, so
+  only the apk-under-test has one generated for it. As a result,
+  instrumentation test APKs that use apk-under-test cannot use native methods
+  that aren't already part of the apk-under-test.
+
+## How to Generate Java Source Code
+There are two ways to go about generating source files: Annotation Processors
+and custom build steps.
+
+### Annotation Processors
+* These are run by `javac` as part of the compile step.
+* They **cannot** modify the source files that they apply to. They can only
+  generate new sources.
+* Use these when:
+  * an existing Annotation Processor does what you want
+    (E.g. Dagger, AutoService, etc.), or
+  * you need to understand Java types to do generation.
+
+### Custom Build Steps
+* These use discrete build actions to generate source files.
+  * Some generate `.java` directly, but most generate a zip file of sources
+    (called a `.srcjar`) to reduce the number of inputs / outputs.
+* Examples of existing templates:
+  * `jinja_template`: Generates source files using [Jinja].
+  * `java_cpp_template`: Generates source files using the C preprocessor.
+  * `java_cpp_enum`: Generates `@IntDef`s based on enums within `.h` files
+    (see the sketch after this section).
+  * `java_cpp_strings`: Generates String constants based on strings defined in
+    `.cc` files.
+* Custom build steps are preferred over Annotation Processors because they are
+  generally easier to understand, and can run in parallel with other steps
+  (rather than being tied to compiles).
+
+[Jinja]: https://palletsprojects.com/p/jinja/
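+
+As a hedged sketch of the kind of Java a step like `java_cpp_enum` emits (the
+enum name, values, and exact shape here are illustrative, not actual generated
+output):
+
+```java
+import androidx.annotation.IntDef;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+// Mirrors a hypothetical C++ enum so Java and C++ share one set of constants.
+@IntDef({TabEvent.TAB_OPENED, TabEvent.TAB_CLOSED})
+@Retention(RetentionPolicy.SOURCE)
+public @interface TabEvent {
+    int TAB_OPENED = 0;
+    int TAB_CLOSED = 1;
+}
+```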
+
+## Static Analysis & Code Checks
+
+We use several tools for static analysis.
+
+### [ErrorProne](https://errorprone.info/)
+* Runs as part of normal compilation. Controlled by GN arg: `use_errorprone_java_compiler`.
+* Most useful check:
+  * Enforcement of `@GuardedBy` annotations.
+* List of enabled / disabled checks exists [within javac.py](https://cs.chromium.org/chromium/src/build/android/gyp/javac.py?l=30)
+  * Many checks are currently disabled because there is work involved in fixing
+    violations they introduce. Please help!
+* Custom checks for Chrome:
+  * [//tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/](/tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/)
+* Use ErrorProne checks when you need something more sophisticated than pattern
+  matching.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
+### [Android Lint](https://developer.android.com/studio/write/lint)
+* Runs as part of normal compilation. Controlled by GN arg: `disable_android_lint`.
+* Most useful check:
+  * Enforcing `@TargetApi` annotations (ensuring you don't call a function that
+    does not exist on all versions of Android unless guarded by a version
+    check; see the sketch below).
+* List of disabled checks:
+  * [//build/android/lint/suppressions.xml](/build/android/lint/suppressions.xml)
+* Custom lint checks [are possible][lint_plugins], but we don't have any.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
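+A hedged sketch of the pattern this check enforces (the API used here,
+`createNotificationChannel()`, exists only on Android O / API 26+):
+
+```java
+import android.app.NotificationChannel;
+import android.app.NotificationManager;
+import android.os.Build;
+
+class NotificationHelper {
+    static void createChannel(NotificationManager manager,
+            NotificationChannel channel) {
+        // Guarding the call with a version check satisfies the NewApi check;
+        // calling it unguarded would crash on pre-O devices.
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+            manager.createNotificationChannel(channel);
+        }
+    }
+}
+```
+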
+[lint_plugins]: http://tools.android.com/tips/lint-custom-rules
+
+### [Bytecode Processor](/build/android/bytecode/)
+* Performs a single check:
+  * That target `deps` are not missing any entries.
+  * In other words: Enforces that targets do not rely on indirect dependencies
+    to populate their classpath.
+* Checks run on the entire codebase, not only on changed lines.
+
+### [PRESUBMIT.py](/PRESUBMIT.py)
+* Checks for banned patterns via `_BANNED_JAVA_FUNCTIONS`.
+  * (These should likely be moved to checkstyle).
+* Checks for a random set of things in `ChecksAndroidSpecificOnUpload()`.
+  * Including running Checkstyle.
+  * (Some of these other checks should likely also be moved to checkstyle).
+* Checks run only on changed lines.
+
+### [Checkstyle](https://checkstyle.sourceforge.io/)
+* Checks Java style rules that are not covered by clang-format.
+  * E.g.: Unused imports and naming conventions.
+* Allows custom checks to be added via XML. Here [is ours].
+* Preferred over adding checks directly in PRESUBMIT.py because the tool
+  understands `@SuppressWarnings` annotations.
+* Checks run only on changed lines.
+
+[is ours]:  /tools/android/checkstyle/chromium-style-5.0.xml
+
+### [clang-format](https://clang.llvm.org/docs/ClangFormat.html)
+* Formats `.java` files via `git cl format`.
+* Can be toggled on/off with code comments.
+  ```java
+  // clang-format off
+  ... non-formatted code here ...
+  // clang-format on
+  ```
+* Does not work well for multiple annotations or for some lambda expressions,
+  but it is generally agreed that having it is better than not.
diff --git a/src/build/android/docs/life_of_a_resource.md b/src/build/android/docs/life_of_a_resource.md
new file mode 100644
index 0000000..3aacd5e
--- /dev/null
+++ b/src/build/android/docs/life_of_a_resource.md
@@ -0,0 +1,260 @@
+# Life of an Android Resource
+
+[TOC]
+
+## Overview
+
+This document describes how [Android Resources][android resources]
+are built in Chromium's build system. It does not mention native resources
+which are [processed differently][native resources].
+
+[android resources]: https://developer.android.com/guide/topics/resources/providing-resources
+[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide
+
+The steps consume the following files as inputs:
+* AndroidManifest.xml
+  * Including AndroidManifest.xml files from libraries, which get merged
+    together
+* res/ directories
+
+The steps produce the following intermediate files:
+* R.srcjar (contains R.java files)
+* R.txt
+* .resources.zip
+
+The steps produce the following files within an .apk:
+* AndroidManifest.xml (a binary xml file)
+* resources.arsc (contains all values and configuration metadata)
+* res/** (drawables and layouts)
+* classes.dex (just a small portion of classes from generated R.java files)
+
+
+## The Build Steps
+
+Whenever you try to compile an apk or library target, resources go through the
+following steps:
+
+### 1. Constructs .build\_config files:
+
+Inputs:
+* GN target metadata
+* Other .build_config files
+
+Outputs:
+* Target-specific .build_config file
+
+write_build_config.py is run to record target metadata needed by future steps.
+For more details, see [build_config.md](build_config.md).
+
+
+### 2. Prepares resources:
+
+Inputs:
+* Target-specific build\_config file
+* Target-specific Resource dirs (res/ directories)
+* resources.zip files from dependencies (used to generate the R.txt/java files)
+
+Outputs:
+* Target-specific resources.zip (containing only resources in the
+  target-specific resource dirs; no dependent resources here).
+* Target-specific R.txt
+  * Contains a list of resources and their ids (including those of dependencies).
+* Target-specific R.java .srcjar
+  * See [What are R.java files and how are they generated](
+  #how-r_java-files-are-generated)
+
+prepare\_resources.py zips up the target-specific resource dirs and generates
+R.txt and R.java .srcjars. No optimizations, crunching, etc are done on the
+resources.
+
+**The following steps apply only to apk targets (not library targets).**
+
+### 3. Finalizes apk resources:
+
+Inputs:
+* Target-specific build\_config file
+* Dependencies' resources.zip files
+
+Output:
+* Packaged resources zip (named foo.ap_) containing:
+  * AndroidManifest.xml (as binary xml)
+  * resources.arsc
+  * res/**
+* Final R.txt
+  * Contains a list of resources and their ids (including those of dependencies).
+* Final R.java .srcjar
+  * See [What are R.java files and how are they generated](
+  #how-r_java-files-are-generated)
+
+
+#### 3(a). Compiles resources:
+
+For each library / resources target your apk depends on, the following happens:
+* Use a regex (defined in the apk target) to remove select resources (optional).
+* Convert png images to webp for binary size (optional).
+* Move drawables in mdpi to a non-mdpi directory ([why?](http://crbug.com/289843)).
+* Use `aapt2 compile` to compile xml resources to binary xml (references to
+  other resources will now use the id rather than the name for faster lookup at
+  runtime).
+* `aapt2 compile` adds headers/metadata to 9-patch images about which parts of
+  the image are stretchable vs static.
+* `aapt2 compile` outputs a zip with the compiled resources (one for each
+  dependency).
+
+
+#### 3(b). Links resources:
+
+After each dependency is compiled into an intermediate .zip, all those zips are
+linked by the aapt2 link command which does the following:
+* Use the order of dependencies supplied so that some resources clobber each
+  other.
+* Compile the AndroidManifest.xml to binary xml (references to resources are now
+  using ids rather than the string names)
+* Create a resources.arsc file that has the name and values of string
+  resources as well as the name and path of non-string resources (i.e. layouts
+  and drawables).
+* Combine the compiled resources into one packaged resources apk (a zip file
+  with an .ap\_ extension) that has all the resources related files.
+
+
+#### 3(c). Optimizes resources:
+
+This step obfuscates / strips resource names from the resources.arsc so that
+they can be looked up only by their numeric ids (assigned in the compile
+resources step). Access to resources via `Resources.getIdentifier()` no longer
+works unless resources are [allowlisted](#adding-resources-to-the-allowlist).
+
+## App Bundles and Modules:
+
+Processing resources for bundles and modules is slightly different. Each module
+has its resources compiled and linked separately (i.e. it goes through the
+entire process for each module). The modules are then combined to form a
+bundle. Moreover, during the "Finalizes apk resources" step, bundle modules
+produce a `resources.proto` file instead of a `resources.arsc` file.
+
+Resources in a dynamic feature module may reference resources in the base
+module. During the link step for feature module resources, the linked resources
+of the base module are passed in. However, linking against resources currently
+works only with `resources.arsc` format. Thus, when building the base module,
+resources are compiled as both `resources.arsc` and `resources.proto`.
+
+## Debugging resource related errors when resource names are obfuscated
+
+An example message from a stacktrace could be something like this:
+```
+java.lang.IllegalStateException: Could not find CoordinatorLayout descendant
+view with id org.chromium.chrome:id/0_resource_name_obfuscated to anchor view
+android.view.ViewStub{be192d5 G.E...... ......I. 0,0-0,0 #7f0a02ad
+app:id/0_resource_name_obfuscated}
+```
+
+`0_resource_name_obfuscated` is the resource name for all resources that had
+their name obfuscated/stripped during the optimize resources step. To help with
+debugging, the `R.txt` file is archived. The `R.txt` file contains a mapping
+from resource ids to resource names and can be used to get the original resource
+name from the id. In the above message the id is `0x7f0a02ad`.
+
+For local builds, `R.txt` files are output in the `out/*/apks` directory.
+
+For official builds, Googlers can get archived `R.txt` files next to archived
+apks.
+
+### Adding resources to the allowlist
+
+If a resource is accessed via `getIdentifier()`, it needs to be allowlisted in
+an aapt2 resources config file. Entries in the config file look like this:
+
+```
+<resource type>/<resource name>#no_obfuscate
+```
+e.g.:
+```
+string/app_name#no_obfuscate
+id/toolbar#no_obfuscate
+```
+
+The aapt2 config file is passed to the ninja target through the
+`resources_config_paths` variable. To add a resource to the allowlist, check
+where the config is for your target and add a new line for your resource. If
+none exist, create a new config file and pass its path in your target.
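+
+For context, a hedged sketch of the kind of lookup that requires an allowlist
+entry (the resource name is hypothetical):
+
+```java
+// Name-based lookup bypasses the compiled-in id, so the name must survive
+// the optimize step; if it was obfuscated, this returns 0.
+int resId = context.getResources().getIdentifier(
+        "toolbar", "id", context.getPackageName());
+```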
+
+### Webview resource ids
+
+The first two bytes of a resource id is the package id. For regular apks, this
+is `0x7f`. However, Webview is a shared library which gets loaded into other
+apks. The package id for webview resources is assigned dynamically at runtime.
+When webview is loaded it calls this [R file's][Base Module R.java File]
+onResourcesLoaded function to have the correct package id. When deobfuscating
+webview resource ids, disregard the first two bytes in the id when looking it up
+in the `R.txt` file.
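+
+A hedged sketch of how a resource id is laid out (the `0xPPTTEEEE` packing is
+standard Android; the example id is the one from the stack trace above):
+
+```java
+int resId = 0x7f0a02ad;
+int packageId = (resId >> 24) & 0xff; // 0x7f; rewritten at runtime for webview.
+int typeId = (resId >> 16) & 0xff;    // 0x0a; the resource type (e.g. "id").
+int entryId = resId & 0xffff;         // 0x02ad; the entry within the type.
+```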
+
+Monochrome, when loaded as webview, rewrites the package ids of resources used
+by the webview portion to the correct value at runtime; otherwise, its resources
+have package id `0x7f` when run as a regular apk.
+
+[Base Module R.java File]: https://cs.chromium.org/chromium/src/out/android-Debug/gen/android_webview/system_webview_apk/generated_java/gen/base_module/R.java
+
+## How R.java files are generated
+
+R.java is a list of static classes, each with multiple static fields containing
+ids. These ids are used in java code to reference resources in the apk.
+
+There are three types of R.java files in Chrome.
+1. Base Module Root R.java Files
+2. DFM Root R.java Files
+3. Source R.java Files
+
+Example Base Module Root R.java File
+```java
+package gen.base_module;
+
+public final class R {
+    public static class anim  {
+        public static final int abc_fade_in = 0x7f010000;
+        public static final int abc_fade_out = 0x7f010001;
+        public static final int abc_slide_in_top = 0x7f010007;
+    }
+    public static class animator  {
+        public static final int design_appbar_state_list_animator = 0x7f020000;
+    }
+}
+```
+Base module root R.java files contain base android resources. All R.java files
+can access base module resources through inheritance.
+
+Example DFM Root R.java File
+```java
+package gen.vr_module;
+
+public final class R {
+    public static class anim extends gen.base_module.R.anim {
+    }
+    public static class animator extends gen.base_module.R.animator  {
+        public static final int design_appbar_state_list_animator = 0x7f030000;
+    }
+}
+```
+DFM root R.java files extend base module root R.java files. This allows DFMs to
+access their own resources as well as the base module's resources.
+
+Example Source R.java File
+```java
+package org.chromium.chrome.vr;
+
+public final class R {
+    public static final class anim extends
+            gen.base_module.R.anim {}
+    public static final class animator extends
+            gen.base_module.R.animator {}
+}
+```
+Source R.java files extend root R.java files and have no resources of their own.
+Developers can import these R.java files to access resources in the apk.
+
+The R.java file generated via the prepare resources step above has temporary ids
+which are not marked `final`. That R.java file is only used so that javac can
+compile the java code that references R.*.
+
+The R.java generated during the finalize apk resources step has
+permanent ids. These ids are marked as `final` (except webview resources that
+need to be [rewritten at runtime](#webview-resource-ids)).
diff --git a/src/build/android/docs/lint.md b/src/build/android/docs/lint.md
new file mode 100644
index 0000000..4ba13d7
--- /dev/null
+++ b/src/build/android/docs/lint.md
@@ -0,0 +1,140 @@
+# Lint
+
+Android's [**lint**](https://developer.android.com/tools/help/lint.html) is a
+static analysis tool that Chromium uses to catch possible issues in Java code.
+
+This is a list of [**checks**](http://tools.android.com/tips/lint-checks) that
+you might encounter.
+
+[TOC]
+
+## How Chromium uses lint
+
+Chromium only runs lint on apk or bundle targets that explicitly set
+`enable_lint = true`. Some example targets that have this set are:
+
+ - `//chrome/android:monochrome_public_bundle`
+ - `//android_webview/support_library/boundary_interfaces:boundary_interface_example_apk`
+ - `//remoting/android:remoting_apk`
+
+## My code has a lint error
+
+If lint reports an issue in your code, there are several possible remedies.
+In descending order of preference:
+
+### Fix it
+
+While this isn't always the right response, fixing the lint error or warning
+should be the default.
+
+### Suppress it locally
+
+Java provides an annotation,
+[`@SuppressWarnings`](https://developer.android.com/reference/java/lang/SuppressWarnings),
+that tells lint to ignore the annotated element. It can be used on classes,
+constructors, methods, parameters, fields, or local variables, though usage in
+Chromium is typically limited to the first three. You do not need to import it
+since it is in the `java.lang` package.
+
+Like many suppression annotations, `@SuppressWarnings` takes a value that tells
+**lint** what to ignore. It can be a single `String`:
+
+```java
+@SuppressWarnings("NewApi")
+public void foo() {
+    a.methodThatRequiresHighSdkLevel();
+}
+```
+
+It can also be a list of `String`s:
+
+```java
+@SuppressWarnings({
+        "NewApi",
+        "UseSparseArrays"
+        })
+public Map<Integer, FakeObject> bar() {
+    Map<Integer, FakeObject> shouldBeASparseArray = new HashMap<Integer, FakeObject>();
+    another.methodThatRequiresHighSdkLevel(shouldBeASparseArray);
+    return shouldBeASparseArray;
+}
+```
+
+For resource xml files you can use `tools:ignore`:
+
+```xml
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:tools="http://schemas.android.com/tools">
+    <!-- TODO(crbug/###): remove tools:ignore once these colors are used -->
+    <color name="hi" tools:ignore="NewApi,UnusedResources">@color/unused</color>
+</resources>
+```
+
+The examples above are the recommended ways of suppressing lint warnings.
+
+### Suppress it in a `lint-suppressions.xml` file
+
+**lint** can be given a per-target XML configuration file containing warnings or
+errors that should be ignored. Each target defines its own configuration file
+via the `lint_suppressions_file` gn variable. It is usually defined near its
+`enable_lint` gn variable.
+
+These suppressions files should only be used for temporarily ignoring warnings
+that are too hard (or not possible) to suppress locally, and for permanently
+ignoring warnings that apply only to that target. To permanently ignore a
+warning for all targets, add the warning to the `_DISABLED_ALWAYS` list in
+[build/android/gyp/lint.py](https://source.chromium.org/chromium/chromium/src/+/master:build/android/gyp/lint.py).
+Disabling globally makes lint a bit faster.
+
+The exception to the above rule is for warnings that affect multiple languages.
+Feel free to suppress those in lint-suppressions.xml files since it is not
+practical to suppress them in each language file and it is a lot of extra bloat
+to list out every language for every violation in lint-baseline.xml files.
+
+Here is an example of how to structure a suppressions XML file:
+
+```xml
+<?xml version="1.0" encoding="utf-8" ?>
+<lint>
+  <!-- Chrome is a system app. -->
+  <issue id="ProtectedPermissions" severity="ignore"/>
+  <issue id="UnusedResources">
+    <!-- Raw resources are accessed by URL in various places. -->
+    <ignore regexp="gen/remoting/android/.*/res/raw/credits.*"/>
+    <!-- TODO(crbug.com/###): Remove the following line.  -->
+    <ignore regexp="The resource `R.string.soon_to_be_used` appears to be unused"/>
+  </issue>
+</lint>
+```
+
+## What are `lint-baseline.xml` files for?
+
+Baseline files are to help us introduce new lint warnings and errors without
+blocking on fixing all our existing code that violates these new errors. Since
+they are generated files, they should **not** be used to suppress lint warnings.
+One of the approaches above should be used instead. Eventually all the errors in
+baseline files should be either fixed or ignored permanently.
+
+The following are some common scenarios where you may need to update baseline
+files.
+
+### I updated `cmdline-tools` and now there are tons of new errors!
+
+This happens every time lint is updated, since lint is provided by
+`cmdline-tools`.
+
+Baseline files are defined via the `lint_baseline_file` gn variable. It is
+usually defined near a target's `enable_lint` gn variable. To regenerate the
+baseline file, delete it and re-run the lint target. The command will fail, but
+the baseline file will have been generated.
+
+This may need to be repeated for all targets that have set `enable_lint = true`,
+including downstream targets. Downstream baseline files should be updated
+first to avoid build breakages. Each target has its own `lint_baseline_file`
+defined and so all these files can be removed and regenerated as needed.
+
+### I updated `library X` and now there are tons of new errors!
+
+This is usually because `library X`'s aar contains custom lint checks and/or
+custom annotation definitions. Follow the same procedure as for `cmdline-tools`
+updates.
diff --git a/src/build/android/download_doclava.py b/src/build/android/download_doclava.py
new file mode 100755
index 0000000..1982fdb
--- /dev/null
+++ b/src/build/android/download_doclava.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Minimal tool to download doclava from Google storage when building for
+Android."""
+
+import os
+import subprocess
+import sys
+
+
+def main():
+  # Some Windows bots inadvertently have third_party/android_sdk installed,
+  # but are unable to run download_from_google_storage because depot_tools
+  # is not in their path, so avoid failure and bail.
+  if sys.platform == 'win32':
+    return 0
+  subprocess.check_call([
+      'download_from_google_storage',
+      '--no_resume',
+      '--no_auth',
+      '--bucket', 'chromium-doclava',
+      '--extract',
+      '-s',
+      os.path.join(os.path.dirname(__file__), '..', '..', 'buildtools',
+                   'android', 'doclava.tar.gz.sha1')])
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/dump_apk_resource_strings.py b/src/build/android/dump_apk_resource_strings.py
new file mode 100755
index 0000000..8417e29
--- /dev/null
+++ b/src/build/android/dump_apk_resource_strings.py
@@ -0,0 +1,664 @@
+#!/usr/bin/env vpython
+# encoding: utf-8
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A script to parse and dump localized strings in resource.arsc files."""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import cProfile
+import os
+import re
+import subprocess
+import sys
+import zipfile
+
+# pylint: disable=bare-except
+
+# Assuming this script is located under build/android, try to import
+# build/android/gyp/bundletool.py to get the default path to the bundletool
+# jar file. If this fails, using --bundletool-path will be required to parse
+# bundles, allowing this script to be relocated or reused somewhere else.
+try:
+  sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'gyp'))
+  import bundletool
+
+  _DEFAULT_BUNDLETOOL_PATH = bundletool.BUNDLETOOL_JAR_PATH
+except:
+  _DEFAULT_BUNDLETOOL_PATH = None
+
+# Try to get the path of the aapt build tool from catapult/devil.
+try:
+  import devil_chromium  # pylint: disable=unused-import
+  from devil.android.sdk import build_tools
+  _AAPT_DEFAULT_PATH = build_tools.GetPath('aapt')
+except:
+  _AAPT_DEFAULT_PATH = None
+
+
+def AutoIndentStringList(lines, indentation=2):
+  """Auto-indents a input list of text lines, based on open/closed braces.
+
+  For example, the following input text:
+
+    'Foo {',
+    'Bar {',
+    'Zoo',
+    '}',
+    '}',
+
+  Will return the following:
+
+    'Foo {',
+    '  Bar {',
+    '    Zoo',
+    '  }',
+    '}',
+
+  The rules are pretty simple:
+    - A line that ends with an open brace ({) increments indentation.
+    - A line that starts with a closing brace (}) decrements it.
+
+  The main idea is to make outputting structured text data trivial,
+  since it can be assumed that the final output will be passed through
+  this function to make it human-readable.
+
+  Args:
+    lines: an iterator over input text lines. They should not contain
+      line terminators (e.g. '\n').
+  Returns:
+    A new list of text lines, properly auto-indented.
+  """
+  margin = ''
+  result = []
+  # NOTE: Intentional but significant speed optimizations in this function:
+  #   - |line and line[0] == <char>| instead of |line.startswith(<char>)|.
+  #   - |line and line[-1] == <char>| instead of |line.endswith(<char>)|.
+  for line in lines:
+    if line and line[0] == '}':
+      margin = margin[:-indentation]
+    result.append(margin + line)
+    if line and line[-1] == '{':
+      margin += ' ' * indentation
+
+  return result
+
+
+# pylint: disable=line-too-long
+
+# NOTE: aapt dump will quote the following characters only: \n, \ and "
+# see https://android.googlesource.com/platform/frameworks/base/+/master/libs/androidfw/ResourceTypes.cpp#7270
+
+# pylint: enable=line-too-long
+
+
+def UnquoteString(s):
+  """Unquote a given string from aapt dump.
+
+  Args:
+    s: A UTF-8 encoded string that contains backslashes for quotes, as found
+      in the output of 'aapt dump resources --values'.
+  Returns:
+    The unquoted version of the input string.
+  """
+  if not '\\' in s:
+    return s
+
+  result = ''
+  start = 0
+  size = len(s)
+  while start < size:
+    pos = s.find('\\', start)
+    if pos < 0:
+      break
+
+    result += s[start:pos]
+    count = 1
+    while pos + count < size and s[pos + count] == '\\':
+      count += 1
+
+    result += '\\' * (count / 2)
+    start = pos + count
+    if count & 1:
+      if start < size:
+        ch = s[start]
+        if ch == 'n':  # \n is the only non-printable character supported.
+          ch = '\n'
+        result += ch
+        start += 1
+      else:
+        result += '\\'
+
+  result += s[start:]
+  return result
+
+
+assert UnquoteString(r'foo bar') == 'foo bar'
+assert UnquoteString(r'foo\nbar') == 'foo\nbar'
+assert UnquoteString(r'foo\\nbar') == 'foo\\nbar'
+assert UnquoteString(r'foo\\\nbar') == 'foo\\\nbar'
+assert UnquoteString(r'foo\n\nbar') == 'foo\n\nbar'
+assert UnquoteString(r'foo\\bar') == r'foo\bar'
+
+
+def QuoteString(s):
+  """Quote a given string for external output.
+
+  Args:
+    s: An input UTF-8 encoded string.
+  Returns:
+    A quoted version of the string, using the same rules as 'aapt dump'.
+  """
+  # NOTE: Using repr() would escape all non-ASCII bytes in the string, which
+  # is undesirable.
+  return s.replace('\\', r'\\').replace('"', '\\"').replace('\n', '\\n')
+
+
+assert QuoteString(r'foo "bar"') == 'foo \\"bar\\"'
+assert QuoteString('foo\nbar') == 'foo\\nbar'
+
+
+def ReadStringMapFromRTxt(r_txt_path):
+  """Read all string resource IDs and names from an R.txt file.
+
+  Args:
+    r_txt_path: Input file path.
+  Returns:
+    A {res_id -> res_name} dictionary corresponding to the string resources
+    from the input R.txt file.
+  """
+  # NOTE: Typical line of interest looks like:
+  # int string AllowedDomainsForAppsTitle 0x7f130001
+  result = {}
+  prefix = 'int string '
+  with open(r_txt_path) as f:
+    for line in f:
+      line = line.rstrip()
+      if line.startswith(prefix):
+        res_name, res_id = line[len(prefix):].split(' ')
+        result[int(res_id, 0)] = res_name
+  return result
+
+
+class ResourceStringValues(object):
+  """Models all possible values for a named string."""
+
+  def __init__(self):
+    self.res_name = None
+    self.res_values = {}
+
+  def AddValue(self, res_name, res_config, res_value):
+    """Add a new value to this entry.
+
+    Args:
+      res_name: Resource name. If this is not the first time this method
+        is called with the same resource name, then |res_name| should match
+        previous parameters for sanity checking.
+      res_config: Config associated with this value. This can actually be
+        anything that can be converted to a string.
+      res_value: UTF-8 encoded string value.
+    """
+    if res_name is not self.res_name and res_name != self.res_name:
+      if self.res_name is None:
+        self.res_name = res_name
+      else:
+        # Sanity check: the resource name should be the same for all chunks.
+        # Resource ID is redefined with a different name!!
+        print('WARNING: Resource key ignored (%s, should be %s)' %
+              (res_name, self.res_name))
+
+    if self.res_values.setdefault(res_config, res_value) is not res_value:
+      print('WARNING: Duplicate value definition for [config %s]: %s ' \
+            '(already has %s)' % (
+                res_config, res_value, self.res_values[res_config]))
+
+  def ToStringList(self, res_id):
+    """Convert entry to string list for human-friendly output."""
+    values = sorted(
+        [(str(config), value) for config, value in self.res_values.iteritems()])
+    if res_id is None:
+      # res_id will be None when the resource ID should not be part
+      # of the output.
+      result = ['name=%s count=%d {' % (self.res_name, len(values))]
+    else:
+      result = [
+          'res_id=0x%08x name=%s count=%d {' % (res_id, self.res_name,
+                                                len(values))
+      ]
+    for config, value in values:
+      result.append('%-16s "%s"' % (config, QuoteString(value)))
+    result.append('}')
+    return result
+
+
+class ResourceStringMap(object):
+  """Convenience class to hold the set of all localized strings in a table.
+
+  Usage is the following:
+     1) Create new (empty) instance.
+     2) Call AddValue() repeatedly to add new values.
+     3) Eventually call RemapResourceNames() to remap resource names.
+     4) Call ToStringList() to convert the instance to a human-readable
+        list of strings that can later be used with AutoIndentStringList()
+        for example.
+  """
+
+  def __init__(self):
+    self._res_map = collections.defaultdict(ResourceStringValues)
+
+  def AddValue(self, res_id, res_name, res_config, res_value):
+    self._res_map[res_id].AddValue(res_name, res_config, res_value)
+
+  def RemapResourceNames(self, id_name_map):
+    """Rename all entries according to a given {res_id -> res_name} map."""
+    for res_id, res_name in id_name_map.iteritems():
+      if res_id in self._res_map:
+        self._res_map[res_id].res_name = res_name
+
+  def ToStringList(self, omit_ids=False):
+    """Dump content to a human-readable string list.
+
+    Note that the strings are ordered by their resource name first, and
+    resource id second.
+
+    Args:
+      omit_ids: If True, do not put resource IDs in the result. This might
+        be useful when comparing the outputs of two different builds of the
+        same APK, or two related APKs (e.g. ChromePublic.apk vs Chrome.apk)
+        where the resource IDs might be slightly different, but not the
+        string contents.
+    Returns:
+      A list of strings that can later be sent to AutoIndentStringList().
+    """
+    result = ['Resource strings (count=%d) {' % len(self._res_map)]
+    res_map = self._res_map
+
+    # A small function to compare two (res_id, values) tuples
+    # by resource name first, then resource ID.
+    def cmp_id_name(a, b):
+      result = cmp(a[1].res_name, b[1].res_name)
+      if result == 0:
+        result = cmp(a[0], b[0])
+      return result
+
+    for res_id, _ in sorted(res_map.iteritems(), cmp=cmp_id_name):
+      result += res_map[res_id].ToStringList(None if omit_ids else res_id)
+    result.append('}  # Resource strings')
+    return result
+
+
+@contextlib.contextmanager
+def ManagedOutput(output_file):
+  """Create an output File object that will be closed on exit if necessary.
+
+  Args:
+    output_file: Optional output file path.
+  Yields:
+    If |output_file| is empty, this simply yields sys.stdout. Otherwise, this
+    opens the file path for writing text, and yields its File object. The
+    context will ensure that the object is always closed on scope exit.
+  """
+  close_output = False
+  if output_file:
+    output = open(output_file, 'wt')
+    close_output = True
+  else:
+    output = sys.stdout
+  try:
+    yield output
+  finally:
+    if close_output:
+      output.close()
+
+
+@contextlib.contextmanager
+def ManagedPythonProfiling(enable_profiling, sort_key='tottime'):
+  """Enable Python profiling if needed.
+
+  Args:
+    enable_profiling: Boolean flag. True to enable python profiling.
+    sort_key: Sorting key for the final stats dump.
+  Yields:
+    If |enable_profiling| is False, this yields False. Otherwise, this
+    yields a new Profile instance just after enabling it. The manager
+    ensures that profiling stops and prints statistics on scope exit.
+  """
+  pr = None
+  if enable_profiling:
+    pr = cProfile.Profile()
+    pr.enable()
+  try:
+    yield pr
+  finally:
+    if pr:
+      pr.disable()
+      pr.print_stats(sort=sort_key)
+
+
+def IsFilePathABundle(input_file):
+  """Return True iff |input_file| holds an Android app bundle."""
+  try:
+    with zipfile.ZipFile(input_file) as input_zip:
+      _ = input_zip.getinfo('BundleConfig.pb')
+      return True
+  except:
+    return False
+
+
+# Example output from 'bundletool dump resources --values' corresponding
+# to strings:
+#
+# 0x7F1200A0 - string/abc_action_menu_overflow_description
+#         (default) - [STR] "More options"
+#         locale: "ca" - [STR] "Més opcions"
+#         locale: "da" - [STR] "Flere muligheder"
+#         locale: "fa" - [STR] " گزینه<U+200C>های بیشتر"
+#         locale: "ja" - [STR] "その他のオプション"
+#         locale: "ta" - [STR] "மேலும் விருப்பங்கள்"
+#         locale: "nb" - [STR] "Flere alternativer"
+#         ...
+#
+# Fun fact #1: Bundletool uses <lang>-<REGION> instead of <lang>-r<REGION>
+#              for locales!
+#
+# Fun fact #2: The <U+200C> is terminal output for \u200c, the output is
+#              really UTF-8 encoded when it is read by this script.
+#
+# Fun fact #3: Bundletool quotes \n, \\ and \" just like aapt since 0.8.0.
+#
+_RE_BUNDLE_STRING_RESOURCE_HEADER = re.compile(
+    r'^0x([0-9A-F]+)\s\-\sstring/(\w+)$')
+assert _RE_BUNDLE_STRING_RESOURCE_HEADER.match(
+    '0x7F1200A0 - string/abc_action_menu_overflow_description')
+
+_RE_BUNDLE_STRING_DEFAULT_VALUE = re.compile(
+    r'^\s+\(default\) - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"').group(1) == "More options"
+
+_RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile(
+    r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(
+    u'        locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8'))
+
+
+def ParseBundleResources(bundle_tool_jar_path, bundle_path):
+  """Use bundletool to extract the localized strings of a given bundle.
+
+  Args:
+    bundle_tool_jar_path: Path to bundletool .jar executable.
+    bundle_path: Path to input bundle.
+  Returns:
+    A new ResourceStringMap instance populated with the bundle's content.
+  """
+  cmd_args = [
+      'java', '-jar', bundle_tool_jar_path, 'dump', 'resources', '--bundle',
+      bundle_path, '--values'
+  ]
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+  res_map = ResourceStringMap()
+  current_resource_id = None
+  current_resource_name = None
+  keep_parsing = True
+  need_value = False
+  while keep_parsing:
+    line = p.stdout.readline()
+    if not line:
+      break
+    # Do not use rstrip(), since this should only remove trailing newlines
+    # but not trailing whitespace that happens to be embedded in the string
+    # value for some reason.
+    line = line.rstrip('\n\r')
+    m = _RE_BUNDLE_STRING_RESOURCE_HEADER.match(line)
+    if m:
+      current_resource_id = int(m.group(1), 16)
+      current_resource_name = m.group(2)
+      need_value = True
+      continue
+
+    if not need_value:
+      continue
+
+    resource_config = None
+    m = _RE_BUNDLE_STRING_DEFAULT_VALUE.match(line)
+    if m:
+      resource_config = 'config (default)'
+      resource_value = m.group(1)
+    else:
+      m = _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(line)
+      if m:
+        resource_config = 'config %s' % m.group(1)
+        resource_value = m.group(2)
+
+    if resource_config is None:
+      need_value = False
+      continue
+
+    res_map.AddValue(current_resource_id, current_resource_name,
+                     resource_config, UnquoteString(resource_value))
+  return res_map
+
+
+# Name of the binary resources table file inside an APK.
+RESOURCES_FILENAME = 'resources.arsc'
+
+
+def IsFilePathAnApk(input_file):
+  """Returns True iff a ZipFile instance is for a regular APK."""
+  try:
+    with zipfile.ZipFile(input_file) as input_zip:
+      _ = input_zip.getinfo(RESOURCES_FILENAME)
+      return True
+  except:
+    return False
+
+
+# pylint: disable=line-too-long
+
+# Example output from 'aapt dump resources --values' corresponding
+# to strings:
+#
+#      config zh-rHK
+#        resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)
+#          (string8) "瀏覽首頁"
+#        resource 0x7f12009d org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000c8e0 (s=0x0008 r=0x00)
+#          (string8) "向上瀏覽"
+#
+
+# The following are compiled regular expressions used to recognize each
+# kind of line and to extract relevant information.
+#
+_RE_AAPT_CONFIG = re.compile(r'^\s+config (.+):$')
+assert _RE_AAPT_CONFIG.match('   config (default):')
+assert _RE_AAPT_CONFIG.match('   config zh-rTW:')
+
+# Match an ISO 639-1 or ISO 639-2 locale.
+_RE_AAPT_ISO_639_LOCALE = re.compile(r'^[a-z]{2,3}(-r[A-Z]{2,3})?$')
+assert _RE_AAPT_ISO_639_LOCALE.match('de')
+assert _RE_AAPT_ISO_639_LOCALE.match('zh-rTW')
+assert _RE_AAPT_ISO_639_LOCALE.match('fil')
+assert not _RE_AAPT_ISO_639_LOCALE.match('land')
+
+_RE_AAPT_BCP47_LOCALE = re.compile(r'^b\+[a-z][a-zA-Z0-9\+]+$')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr+Latn')
+assert _RE_AAPT_BCP47_LOCALE.match('b+en+US')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+1234')
+
+_RE_AAPT_STRING_RESOURCE_HEADER = re.compile(
+    r'^\s+resource 0x([0-9a-f]+) [a-zA-Z][a-zA-Z0-9.]+:string/(\w+):.*$')
+assert _RE_AAPT_STRING_RESOURCE_HEADER.match(
+    r'  resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)'
+)
+
+_RE_AAPT_STRING_RESOURCE_VALUE = re.compile(r'^\s+\(string8\) "(.*)"$')
+assert _RE_AAPT_STRING_RESOURCE_VALUE.match(r'       (string8) "瀏覽首頁"')
+
+# pylint: enable=line-too-long
+
+
+def _ConvertAaptLocaleToBcp47(locale):
+  """Convert a locale name from 'aapt dump' to its BCP-47 form."""
+  if locale.startswith('b+'):
+    return '-'.join(locale[2:].split('+'))
+  lang, _, region = locale.partition('-r')
+  if region:
+    return '%s-%s' % (lang, region)
+  return lang
+
+
+assert _ConvertAaptLocaleToBcp47('(default)') == '(default)'
+assert _ConvertAaptLocaleToBcp47('en') == 'en'
+assert _ConvertAaptLocaleToBcp47('en-rUS') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('en-US') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('fil') == 'fil'
+assert _ConvertAaptLocaleToBcp47('b+sr+Latn') == 'sr-Latn'
+
+
+def ParseApkResources(aapt_path, apk_path):
+  """Use aapt to extract the localized strings of a given bundle.
+
+  Args:
+    bundle_tool_jar_path: Path to bundletool .jar executable.
+    bundle_path: Path to input bundle.
+  Returns:
+    A new ResourceStringMap instance populated with the bundle's content.
+  """
+  cmd_args = [aapt_path, 'dump', '--values', 'resources', apk_path]
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+
+  res_map = ResourceStringMap()
+  current_locale = None
+  current_resource_id = None
+  current_resource_name = None
+  need_value = False
+  while True:
+    line = p.stdout.readline().rstrip()
+    if not line:
+      break
+    m = _RE_AAPT_CONFIG.match(line)
+    if m:
+      locale = None
+      aapt_locale = m.group(1)
+      if aapt_locale == '(default)':
+        locale = aapt_locale
+      elif _RE_AAPT_ISO_639_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      elif _RE_AAPT_BCP47_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      if locale is not None:
+        current_locale = _ConvertAaptLocaleToBcp47(locale)
+      continue
+
+    if current_locale is None:
+      continue
+
+    if need_value:
+      m = _RE_AAPT_STRING_RESOURCE_VALUE.match(line)
+      if not m:
+        # Should not happen
+        sys.stderr.write('WARNING: Missing value for string ID 0x%08x "%s"\n' %
+                         (current_resource_id, current_resource_name))
+        resource_value = '<MISSING_STRING_%08x>' % current_resource_id
+      else:
+        resource_value = UnquoteString(m.group(1))
+
+      res_map.AddValue(current_resource_id, current_resource_name,
+                       'config %s' % current_locale, resource_value)
+      need_value = False
+    else:
+      m = _RE_AAPT_STRING_RESOURCE_HEADER.match(line)
+      if m:
+        current_resource_id = int(m.group(1), 16)
+        current_resource_name = m.group(2)
+        need_value = True
+
+  return res_map
+
+
+def main(args):
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      'input_file',
+      help='Input file path. This can be either an APK, or an app bundle.')
+  parser.add_argument('--output', help='Optional output file path.')
+  parser.add_argument(
+      '--omit-ids',
+      action='store_true',
+      help='Omit resource IDs in the output. This is useful '
+      'to compare the contents of two distinct builds of the '
+      'same APK.')
+  parser.add_argument(
+      '--aapt-path',
+      default=_AAPT_DEFAULT_PATH,
+      help='Path to aapt executable. Optional for APKs.')
+  parser.add_argument(
+      '--r-txt-path',
+      help='Path to an optional input R.txt file used to translate resource '
+      'IDs to string names. Useful when resources names in the input files '
+      'were obfuscated. NOTE: If ${INPUT_FILE}.R.txt exists, it will be used '
+      'automatically by this script.')
+  parser.add_argument(
+      '--bundletool-path',
+      default=_DEFAULT_BUNDLETOOL_PATH,
+      help='Path to alternate bundletool .jar file. Only used for bundles.')
+  parser.add_argument(
+      '--profile', action='store_true', help='Enable Python profiling.')
+
+  options = parser.parse_args(args)
+
+  # Create a {res_id -> res_name} map for unobfuscation, if needed.
+  res_id_name_map = {}
+  r_txt_path = options.r_txt_path
+  if not r_txt_path:
+    candidate_r_txt_path = options.input_file + '.R.txt'
+    if os.path.exists(candidate_r_txt_path):
+      r_txt_path = candidate_r_txt_path
+
+  if r_txt_path:
+    res_id_name_map = ReadStringMapFromRTxt(r_txt_path)
+
+  # Create a helper function that builds a new ResourceStringMap instance
+  # based on the input file's type.
+  if IsFilePathABundle(options.input_file):
+    if not options.bundletool_path:
+      parser.error(
+          '--bundletool-path <BUNDLETOOL_JAR> is required to parse bundles.')
+
+    # Use bundletool to parse the bundle resources.
+    def create_string_map():
+      return ParseBundleResources(options.bundletool_path, options.input_file)
+
+  elif IsFilePathAnApk(options.input_file):
+    if not options.aapt_path:
+      parser.error('--aapt-path <AAPT> is required to parse APKs.')
+
+    # Use aapt dump to parse the APK resources.
+    def create_string_map():
+      return ParseApkResources(options.aapt_path, options.input_file)
+
+  else:
+    parser.error('Unknown file format: %s' % options.input_file)
+
+  # Print everything now.
+  with ManagedOutput(options.output) as output:
+    with ManagedPythonProfiling(options.profile):
+      res_map = create_string_map()
+      res_map.RemapResourceNames(res_id_name_map)
+      lines = AutoIndentStringList(res_map.ToStringList(options.omit_ids))
+      for line in lines:
+        output.write(line)
+        output.write('\n')
+
+
+if __name__ == "__main__":
+  main(sys.argv[1:])
diff --git a/src/build/android/emma_coverage_stats.py b/src/build/android/emma_coverage_stats.py
new file mode 100755
index 0000000..f45f4d4
--- /dev/null
+++ b/src/build/android/emma_coverage_stats.py
@@ -0,0 +1,479 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates incremental code coverage reports for Java code in Chromium.
+
+Usage:
+
+  build/android/emma_coverage_stats.py -v --out <output file path> --emma-dir
+    <EMMA file directory> --lines-for-coverage-file
+    <path to file containing lines for coverage>
+
+  Creates a JSON representation of the overall and file coverage stats and saves
+  this information to the specified output file.
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import re
+import sys
+from xml.etree import ElementTree
+
+import devil_chromium
+from devil.utils import run_tests_helper
+
+NOT_EXECUTABLE = -1
+NOT_COVERED = 0
+COVERED = 1
+PARTIALLY_COVERED = 2
+
+# Coverage information about a single line of code.
+LineCoverage = collections.namedtuple(
+    'LineCoverage',
+    ['lineno', 'source', 'covered_status', 'fractional_line_coverage'])
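+
+# For illustration, the partially covered line shown in the example HTML
+# below would be represented as:
+#   LineCoverage(lineno=108, source='if (index < 0 ...) index = 0;',
+#                covered_status=PARTIALLY_COVERED,
+#                fractional_line_coverage=0.78)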
+
+
+class _EmmaHtmlParser(object):
+  """Encapsulates HTML file parsing operations.
+
+  This class contains all operations related to parsing HTML files that were
+  produced using the EMMA code coverage tool.
+
+  Example HTML:
+
+  Package links:
+    <a href="_files/1.html">org.chromium.chrome</a>
+    This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|.
+
+  Class links:
+    <a href="1e.html">DoActivity.java</a>
+    This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|.
+
+  Line coverage data:
+    <tr class="p">
+       <td class="l" title="78% line coverage (7 out of 9)">108</td>
+       <td title="78% line coverage (7 out of 9 instructions)">
+         if (index < 0 || index = mSelectors.size()) index = 0;</td>
+    </tr>
+    <tr>
+       <td class="l">109</td>
+       <td> </td>
+    </tr>
+    <tr class="c">
+       <td class="l">110</td>
+       <td>        if (mSelectors.get(index) != null) {</td>
+    </tr>
+    <tr class="z">
+       <td class="l">111</td>
+       <td>            for (int i = 0; i < mSelectors.size(); i++) {</td>
+    </tr>
+    Each <tr> element is returned by the selector |XPATH_SELECT_LOC|.
+
+    We can parse this to get:
+      1. Line number
+      2. Line of source code
+      3. Coverage status (c, z, or p)
+      4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED)
+  """
+  # Selector to match all <a> elements within the rows that are in the table
+  # that displays all of the different packages.
+  _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A'
+
+  # Selector to match all <a> elements within the rows that are in the table
+  # that displays all of the different classes within a package.
+  _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A'
+
+  # Selector to match all <tr> elements within the table containing Java source
+  # code in an EMMA HTML file.
+  _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR'
+
+  # Children of HTML elements are represented as a list in ElementTree. These
+  # constants represent list indices corresponding to relevant child elements.
+
+  # Child 1 contains percentage covered for a line.
+  _ELEMENT_PERCENT_COVERED = 1
+
+  # Child 1 contains the original line of source code.
+  _ELEMENT_CONTAINING_SOURCE_CODE = 1
+
+  # Child 0 contains the line number.
+  _ELEMENT_CONTAINING_LINENO = 0
+
+  # Maps CSS class names to corresponding coverage constants.
+  _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED}
+
+  # UTF-8 no break space.
+  _NO_BREAK_SPACE = '\xc2\xa0'
+
+  def __init__(self, emma_file_base_dir):
+    """Initializes _EmmaHtmlParser.
+
+    Args:
+      emma_file_base_dir: Path to the location where EMMA report files are
+        stored. Should be where index.html is stored.
+    """
+    self._base_dir = emma_file_base_dir
+    self._emma_files_path = os.path.join(self._base_dir, '_files')
+    self._index_path = os.path.join(self._base_dir, 'index.html')
+
+  def GetLineCoverage(self, emma_file_path):
+    """Returns a list of LineCoverage objects for the given EMMA HTML file.
+
+    Args:
+      emma_file_path: String representing the path to the EMMA HTML file.
+
+    Returns:
+      A list of LineCoverage objects.
+    """
+    line_tr_elements = self._FindElements(
+        emma_file_path, self._XPATH_SELECT_LOC)
+    line_coverage = []
+    for tr in line_tr_elements:
+      # Get the coverage status.
+      coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE)
+      # Get the fractional coverage value.
+      if coverage_status == PARTIALLY_COVERED:
+        title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE'))
+        # Parse string that contains percent covered: "83% line coverage ...".
+        percent_covered = title_attribute.split('%')[0]
+        fractional_coverage = int(percent_covered) / 100.0
+      else:
+        fractional_coverage = 1.0
+
+      # Get the line number.
+      lineno_element = tr[self._ELEMENT_CONTAINING_LINENO]
+      # Handles oddly formatted HTML (where there is an extra <a> tag).
+      lineno = int(lineno_element.text or
+                   lineno_element[self._ELEMENT_CONTAINING_LINENO].text)
+      # Get the original line of Java source code.
+      raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text
+      utf8_source = raw_source.encode('UTF-8')
+      source = utf8_source.replace(self._NO_BREAK_SPACE, ' ')
+
+      line = LineCoverage(lineno, source, coverage_status, fractional_coverage)
+      line_coverage.append(line)
+
+    return line_coverage
+
+  def GetPackageNameToEmmaFileDict(self):
+    """Returns a dict mapping Java packages to EMMA HTML coverage files.
+
+    Parses the EMMA index.html file to get a list of packages, then parses each
+    package HTML file to get a list of classes for that package, and creates
+    a dict with this info.
+
+    Returns:
+      A dict mapping string representation of Java packages (with class
+        names appended) to the corresponding file paths of EMMA HTML files.
+    """
+    # These <a> elements contain each package name and the path of the file
+    # where all classes within said package are listed.
+    package_link_elements = self._FindElements(
+        self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS)
+    # Maps file path of package directory (EMMA generated) to package name.
+    # Example: emma_dir/f.html: org.chromium.chrome.
+    package_links = {
+      os.path.join(self._base_dir, link.attrib['HREF']): link.text
+      for link in package_link_elements if 'HREF' in link.attrib
+    }
+
+    package_to_emma = {}
+    for package_emma_file_path, package_name in package_links.iteritems():
+      # These <a> elements contain each class name in the current package and
+      # the path of the file where the coverage info is stored for each class.
+      coverage_file_link_elements = self._FindElements(
+          package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS)
+
+      for class_name_element in coverage_file_link_elements:
+        emma_coverage_file_path = os.path.join(
+            self._emma_files_path, class_name_element.attrib['HREF'])
+        full_package_name = '%s.%s' % (package_name, class_name_element.text)
+        package_to_emma[full_package_name] = emma_coverage_file_path
+
+    return package_to_emma
+
+  # pylint: disable=no-self-use
+  def _FindElements(self, file_path, xpath_selector):
+    """Reads a HTML file and performs an XPath match.
+
+    Args:
+      file_path: String representing the path to the HTML file.
+      xpath_selector: String representing xpath search pattern.
+
+    Returns:
+      A list of ElementTree.Elements matching the given XPath selector.
+        Returns an empty list if there is no match.
+    """
+    with open(file_path) as f:
+      file_contents = f.read().decode('ISO-8859-1').encode('UTF-8')
+      root = ElementTree.fromstring(file_contents)
+      return root.findall(xpath_selector)
+
+
+class _EmmaCoverageStats(object):
+  """Computes code coverage stats for Java code using the coverage tool EMMA.
+
+  This class provides an API that allows users to capture absolute code coverage
+  and code coverage on a subset of lines for each Java source file. Coverage
+  reports are generated in JSON format.
+  """
+  # Regular expression to get package name from Java package statement.
+  RE_PACKAGE_MATCH_GROUP = 'package'
+  RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP)
+
+  def __init__(self, emma_file_base_dir, files_for_coverage):
+    """Initialize _EmmaCoverageStats.
+
+    Args:
+      emma_file_base_dir: String representing the path to the base directory
+        where EMMA HTML coverage files are stored, i.e. parent of index.html.
+      files_for_coverage: A list of Java source code file paths to get EMMA
+        coverage for.
+    """
+    self._emma_parser = _EmmaHtmlParser(emma_file_base_dir)
+    self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage)
+
+  def GetCoverageDict(self, lines_for_coverage):
+    """Returns a dict containing detailed coverage information.
+
+    Gets detailed coverage stats for each file specified in the
+    |lines_for_coverage| dict and the total incremental number of lines covered
+    and executable for all files in |lines_for_coverage|.
+
+    Args:
+      lines_for_coverage: A dict mapping Java source file paths to lists of line
+        numbers.
+
+    Returns:
+      A dict containing coverage stats for the given dict of files and lines.
+        Contains absolute coverage stats for each file, coverage stats for each
+        file's lines specified in |lines_for_coverage|, line by line coverage
+        for each file, and overall coverage stats for the lines specified in
+        |lines_for_coverage|.
+    """
+    file_coverage = {}
+    for file_path, line_numbers in lines_for_coverage.iteritems():
+      file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers)
+      if file_coverage_dict:
+        file_coverage[file_path] = file_coverage_dict
+      else:
+        logging.warning(
+            'No code coverage data for %s, skipping.', file_path)
+
+    covered_statuses = [s['incremental'] for s in file_coverage.itervalues()]
+    num_covered_lines = sum(s['covered'] for s in covered_statuses)
+    num_total_lines = sum(s['total'] for s in covered_statuses)
+    return {
+      'files': file_coverage,
+      'patch': {
+        'incremental': {
+          'covered': num_covered_lines,
+          'total': num_total_lines
+        }
+      }
+    }
+
+  def GetCoverageDictForFile(self, file_path, line_numbers):
+    """Returns a dict containing detailed coverage info for the given file.
+
+    Args:
+      file_path: The path to the Java source file that we want to create the
+        coverage dict for.
+      line_numbers: A list of integer line numbers to retrieve additional stats
+        for.
+
+    Returns:
+      A dict containing absolute, incremental, and line by line coverage for
+        a file.
+    """
+    if file_path not in self._source_to_emma:
+      return None
+    emma_file = self._source_to_emma[file_path]
+    total_line_coverage = self._emma_parser.GetLineCoverage(emma_file)
+    incremental_line_coverage = [line for line in total_line_coverage
+                                 if line.lineno in line_numbers]
+    line_by_line_coverage = [
+      {
+        'line': line.source,
+        'coverage': line.covered_status,
+        'changed': line.lineno in line_numbers,
+        'fractional_coverage': line.fractional_line_coverage,
+      }
+      for line in total_line_coverage
+    ]
+    total_covered_lines, total_lines = (
+        self.GetSummaryStatsForLines(total_line_coverage))
+    incremental_covered_lines, incremental_total_lines = (
+        self.GetSummaryStatsForLines(incremental_line_coverage))
+
+    file_coverage_stats = {
+      'absolute': {
+        'covered': total_covered_lines,
+        'total': total_lines
+      },
+      'incremental': {
+        'covered': incremental_covered_lines,
+        'total': incremental_total_lines
+      },
+      'source': line_by_line_coverage,
+    }
+    return file_coverage_stats
+
+  # pylint: disable=no-self-use
+  def GetSummaryStatsForLines(self, line_coverage):
+    """Gets summary stats for a given list of LineCoverage objects.
+
+    Args:
+      line_coverage: A list of LineCoverage objects.
+
+    Returns:
+      A tuple containing the number of lines that are covered and the total
+        number of lines that are executable, respectively.
+    """
+    partially_covered_sum = 0
+    covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0}
+    for line in line_coverage:
+      status = line.covered_status
+      if status == NOT_EXECUTABLE:
+        continue
+      covered_status_totals[status] += 1
+      if status == PARTIALLY_COVERED:
+        partially_covered_sum += line.fractional_line_coverage
+
+    total_covered = covered_status_totals[COVERED] + partially_covered_sum
+    total_lines = sum(covered_status_totals.values())
+    return total_covered, total_lines
+
+  def _GetSourceFileToEmmaFileDict(self, files):
+    """Gets a dict used to correlate Java source files with EMMA HTML files.
+
+    This method gathers the information needed to correlate EMMA HTML
+    files with Java source files. EMMA XML and plain text reports do not provide
+    line by line coverage data, so HTML reports must be used instead.
+    Unfortunately, the HTML files that are created are given garbage names
+    (e.g. 1.html), so we need to manually correlate EMMA HTML files
+    with the original Java source files.
+
+    Args:
+      files: A list of file names for which coverage information is desired.
+
+    Returns:
+      A dict mapping Java source file paths to EMMA HTML file paths.
+    """
+    # Maps Java source file paths to package names.
+    # Example: /usr/code/file.java -> org.chromium.file.java.
+    source_to_package = {}
+    for file_path in files:
+      package = self.GetPackageNameFromFile(file_path)
+      if package:
+        source_to_package[file_path] = package
+      else:
+        logging.warning("Skipping %s because it doesn\'t have a package "
+                        "statement.", file_path)
+
+    # Maps package names to EMMA report HTML files.
+    # Example: org.chromium.file.java -> out/coverage/1a.html.
+    package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict()
+    # Finally, we have a dict mapping Java file paths to EMMA report files.
+    # Example: /usr/code/file.java -> out/coverage/1a.html.
+    source_to_emma = {source: package_to_emma[package]
+                      for source, package in source_to_package.iteritems()
+                      if package in package_to_emma}
+    return source_to_emma
+
+  @staticmethod
+  def NeedsCoverage(file_path):
+    """Checks to see if the file needs to be analyzed for code coverage.
+
+    Args:
+      file_path: A string representing path to the file.
+
+    Returns:
+      True for Java files that exist, False for all others.
+    """
+    if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path):
+      return True
+    else:
+      logging.info('Skipping file %s, cannot compute code coverage.', file_path)
+      return False
+
+  @staticmethod
+  def GetPackageNameFromFile(file_path):
+    """Gets the full package name including the file name for a given file path.
+
+    Args:
+      file_path: String representing the path to the Java source file.
+
+    Returns:
+      A string representing the full package name with file name appended or
+        None if there is no package statement in the file.
+    """
+    with open(file_path) as f:
+      file_content = f.read()
+      package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content)
+      if package_match:
+        package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP)
+        file_name = os.path.basename(file_path)
+        return '%s.%s' % (package, file_name)
+      else:
+        return None
+
+
+def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir):
+  """Generates a coverage report for a given set of lines.
+
+  Writes the results of the coverage analysis to the file specified by
+  |out_file_path|.
+
+  Args:
+    line_coverage_file: The path to a file which contains a dict mapping file
+      names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means
+      that we should compute coverage information on lines 1 - 3 for file1.
+    out_file_path: A string representing the location to write the JSON report.
+    coverage_dir: A string representing the file path where the EMMA
+      HTML coverage files are located (i.e. folder where index.html is located).
+  """
+  with open(line_coverage_file) as f:
+    potential_files_for_coverage = json.load(f)
+
+  files_for_coverage = {f: lines
+                        for f, lines in potential_files_for_coverage.iteritems()
+                        if _EmmaCoverageStats.NeedsCoverage(f)}
+
+  coverage_results = {}
+  if files_for_coverage:
+    code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys())
+    coverage_results = code_coverage.GetCoverageDict(files_for_coverage)
+  else:
+    logging.info('No Java files requiring coverage were included in %s.',
+                 line_coverage_file)
+
+  with open(out_file_path, 'w+') as out_status_file:
+    json.dump(coverage_results, out_status_file)
+
+
+def main():
+  argparser = argparse.ArgumentParser()
+  argparser.add_argument('--out', required=True, type=str,
+                         help='Report output file path.')
+  argparser.add_argument('--emma-dir', required=True, type=str,
+                         help='EMMA HTML report directory.')
+  argparser.add_argument('--lines-for-coverage-file', required=True, type=str,
+                         help='File containing a JSON object. Should contain a '
+                         'dict mapping file names to lists of line numbers of '
+                         'code for which coverage information is desired.')
+  argparser.add_argument('-v', '--verbose', action='count',
+                         help='Print verbose log information.')
+  args = argparser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+  devil_chromium.Initialize()
+  GenerateCoverageReport(args.lines_for_coverage_file, args.out, args.emma_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/emma_coverage_stats_test.py b/src/build/android/emma_coverage_stats_test.py
new file mode 100755
index 0000000..d53292c
--- /dev/null
+++ b/src/build/android/emma_coverage_stats_test.py
@@ -0,0 +1,561 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+from xml.etree import ElementTree
+
+import emma_coverage_stats
+
+import mock  # pylint: disable=import-error
+
+EMPTY_COVERAGE_STATS_DICT = {
+  'files': {},
+  'patch': {
+    'incremental': {
+      'covered': 0, 'total': 0
+    }
+  }
+}
+
+
+class _EmmaHtmlParserTest(unittest.TestCase):
+  """Tests for _EmmaHtmlParser.
+
+  Uses modified EMMA report HTML that contains only the subset of tags needed
+  for test verification.
+  """
+
+  def setUp(self):
+    self.emma_dir = 'fake/dir/'
+    self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir)
+    self.simple_html = '<TR><TD CLASS="p">Test HTML</TD></TR>'
+    self.index_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CLASS="it" CELLSPACING="0">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="_files/0.html"'
+              '>org.chromium.chrome.browser</A></TD>'
+              '<TD CLASS="h">0%   (0/3)</TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="_files/1.html"'
+              '>org.chromium.chrome.browser.tabmodel</A></TD>'
+              '<TD CLASS="h">0%   (0/8)</TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.package_1_class_list_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="1e.html">IntentHelper.java</A></TD>'
+              '<TD CLASS="h">0%   (0/3)</TD>'
+              '<TD CLASS="h">0%   (0/9)</TD>'
+              '<TD CLASS="h">0%   (0/97)</TD>'
+              '<TD CLASS="h">0%   (0/26)</TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.package_2_class_list_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="1f.html">ContentSetting.java</A></TD>'
+              '<TD CLASS="h">0%   (0/1)</TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="20.html">DevToolsServer.java</A></TD>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="21.html">FileProviderHelper.java</A></TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="22.html">ContextualMenuBar.java</A></TD>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="23.html">AccessibilityUtil.java</A></TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="24.html">NavigationPopup.java</A></TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.partially_covered_tr_html = (
+      '<TR CLASS="p">'
+        '<TD CLASS="l" TITLE="78% line coverage (7 out of 9)">108</TD>'
+        '<TD TITLE="78% line coverage (7 out of 9 instructions)">'
+          'if (index &lt; 0 || index = mSelectors.size()) index = 0;</TD>'
+      '</TR>'
+    )
+    self.covered_tr_html = (
+      '<TR CLASS="c">'
+        '<TD CLASS="l">110</TD>'
+        '<TD>        if (mSelectors.get(index) != null) {</TD>'
+      '</TR>'
+    )
+    self.not_executable_tr_html = (
+      '<TR>'
+        '<TD CLASS="l">109</TD>'
+        '<TD> </TD>'
+      '</TR>'
+    )
+    self.tr_with_extra_a_tag = (
+      '<TR CLASS="z">'
+        '<TD CLASS="l">'
+          '<A name="1f">54</A>'
+        '</TD>'
+        '<TD>            }</TD>'
+      '</TR>'
+    )
+
+  def testInit(self):
+    emma_dir = self.emma_dir
+    parser = emma_coverage_stats._EmmaHtmlParser(emma_dir)
+    self.assertEqual(parser._base_dir, emma_dir)
+    self.assertEqual(parser._emma_files_path, 'fake/dir/_files')
+    self.assertEqual(parser._index_path, 'fake/dir/index.html')
+
+  def testFindElements_basic(self):
+    read_values = [self.simple_html]
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TD')
+    self.assertIs(type(found), list)
+    self.assertIs(type(found[0]), ElementTree.Element)
+    self.assertEqual(found[0].text, 'Test HTML')
+
+  def testFindElements_multipleElements(self):
+    multiple_trs = self.not_executable_tr_html + self.covered_tr_html
+    read_values = ['<div>' + multiple_trs + '</div>']
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TR')
+    self.assertEqual(2, len(found))
+
+  def testFindElements_noMatch(self):
+    read_values = [self.simple_html]
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TR')
+    self.assertEqual(found, [])
+
+  def testFindElements_badFilePath(self):
+    with self.assertRaises(IOError):
+      with mock.patch('os.path.exists', return_value=False):
+        self.parser._FindElements('fake', xpath_selector='//tr')
+
+  def testGetPackageNameToEmmaFileDict_basic(self):
+    expected_dict = {
+      'org.chromium.chrome.browser.AccessibilityUtil.java':
+      'fake/dir/_files/23.html',
+      'org.chromium.chrome.browser.ContextualMenuBar.java':
+      'fake/dir/_files/22.html',
+      'org.chromium.chrome.browser.tabmodel.IntentHelper.java':
+      'fake/dir/_files/1e.html',
+      'org.chromium.chrome.browser.ContentSetting.java':
+      'fake/dir/_files/1f.html',
+      'org.chromium.chrome.browser.DevToolsServer.java':
+      'fake/dir/_files/20.html',
+      'org.chromium.chrome.browser.NavigationPopup.java':
+      'fake/dir/_files/24.html',
+      'org.chromium.chrome.browser.FileProviderHelper.java':
+      'fake/dir/_files/21.html'}
+
+    read_values = [self.index_html, self.package_1_class_list_html,
+                   self.package_2_class_list_html]
+    return_dict, mock_open = MockOpenForFunction(
+        self.parser.GetPackageNameToEmmaFileDict, read_values)
+
+    self.assertDictEqual(return_dict, expected_dict)
+    self.assertEqual(mock_open.call_count, 3)
+    calls = [mock.call('fake/dir/index.html'),
+             mock.call('fake/dir/_files/1.html'),
+             mock.call('fake/dir/_files/0.html')]
+    mock_open.assert_has_calls(calls)
+
+  def testGetPackageNameToEmmaFileDict_noPackageElements(self):
+    self.parser._FindElements = mock.Mock(return_value=[])
+    return_dict = self.parser.GetPackageNameToEmmaFileDict()
+    self.assertDictEqual({}, return_dict)
+
+  def testGetLineCoverage_status_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].covered_status,
+                     emma_coverage_stats.COVERED)
+
+  def testGetLineCoverage_status_statusMissing(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.not_executable_tr_html])
+    self.assertEqual(line_coverage[0].covered_status,
+                     emma_coverage_stats.NOT_EXECUTABLE)
+
+  def testGetLineCoverage_fractionalCoverage_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0)
+
+  def testGetLineCoverage_fractionalCoverage_partial(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.partially_covered_tr_html])
+    self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78)
+
+  def testGetLineCoverage_lineno_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].lineno, 110)
+
+  def testGetLineCoverage_lineno_withAlternativeHtml(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.tr_with_extra_a_tag])
+    self.assertEqual(line_coverage[0].lineno, 54)
+
+  def testGetLineCoverage_source(self):
+    self.parser._FindElements = mock.Mock(
+        return_value=[ElementTree.fromstring(self.covered_tr_html)])
+    line_coverage = self.parser.GetLineCoverage('fake_path')
+    self.assertEqual(line_coverage[0].source,
+                     '        if (mSelectors.get(index) != null) {')
+
+  def testGetLineCoverage_multipleElements(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.covered_tr_html, self.partially_covered_tr_html,
+         self.tr_with_extra_a_tag])
+    self.assertEqual(len(line_coverage), 3)
+
+  def GetLineCoverageWithFakeElements(self, html_elements):
+    """Wraps GetLineCoverage so mock HTML can easily be used.
+
+    Args:
+      html_elements: List of strings each representing an HTML element.
+
+    Returns:
+      A list of LineCoverage objects.
+    """
+    elements = [ElementTree.fromstring(string) for string in html_elements]
+    with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+                    return_value=elements):
+      return self.parser.GetLineCoverage('fake_path')
+
+
+class _EmmaCoverageStatsTest(unittest.TestCase):
+  """Tests for _EmmaCoverageStats."""
+
+  def setUp(self):
+    self.good_source_to_emma = {
+      '/path/to/1/File1.java': '/emma/1.html',
+      '/path/2/File2.java': '/emma/2.html',
+      '/path/2/File3.java': '/emma/3.html'
+    }
+    self.line_coverage = [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+        emma_coverage_stats.LineCoverage(
+            4, '', emma_coverage_stats.NOT_COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85),
+        emma_coverage_stats.LineCoverage(
+            6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20)
+    ]
+    self.lines_for_coverage = [1, 3, 5, 6]
+    with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+                    return_value=[]):
+      self.simple_coverage = emma_coverage_stats._EmmaCoverageStats(
+          'fake_dir', {})
+
+  def testInit(self):
+    coverage_stats = self.simple_coverage
+    self.assertIsInstance(coverage_stats._emma_parser,
+                          emma_coverage_stats._EmmaHtmlParser)
+    self.assertIsInstance(coverage_stats._source_to_emma, dict)
+
+  def testNeedsCoverage_withExistingJavaFile(self):
+    test_file = '/path/to/file/File.java'
+    with mock.patch('os.path.exists', return_value=True):
+      self.assertTrue(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testNeedsCoverage_withNonJavaFile(self):
+    test_file = '/path/to/file/File.c'
+    with mock.patch('os.path.exists', return_value=True):
+      self.assertFalse(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testNeedsCoverage_fileDoesNotExist(self):
+    test_file = '/path/to/file/File.java'
+    with mock.patch('os.path.exists', return_value=False):
+      self.assertFalse(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testGetPackageNameFromFile_basic(self):
+    test_file_text = """// Test Copyright
+    package org.chromium.chrome.browser;
+    import android.graphics.RectF;"""
+    result_package, _ = MockOpenForFunction(
+        emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+        [test_file_text], file_path='/path/to/file/File.java')
+    self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java')
+
+  def testGetPackageNameFromFile_noPackageStatement(self):
+    result_package, _ = MockOpenForFunction(
+        emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+        ['not a package statement'], file_path='/path/to/file/File.java')
+    self.assertIsNone(result_package)
+
+  def testGetSummaryStatsForLines_basic(self):
+    covered, total = self.simple_coverage.GetSummaryStatsForLines(
+        self.line_coverage)
+    self.assertEqual(covered, 3.05)
+    self.assertEqual(total, 5)
+
+  def testGetSourceFileToEmmaFileDict(self):
+    package_names = {
+      '/path/to/1/File1.java': 'org.fake.one.File1.java',
+      '/path/2/File2.java': 'org.fake.File2.java',
+      '/path/2/File3.java': 'org.fake.File3.java'
+    }
+    package_to_emma = {
+      'org.fake.one.File1.java': '/emma/1.html',
+      'org.fake.File2.java': '/emma/2.html',
+      'org.fake.File3.java': '/emma/3.html'
+    }
+    with mock.patch('os.path.exists', return_value=True):
+      coverage_stats = self.simple_coverage
+      coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock(
+          return_value=package_to_emma)
+      coverage_stats.GetPackageNameFromFile = lambda x: package_names[x]
+      result_dict = coverage_stats._GetSourceFileToEmmaFileDict(
+          package_names.keys())
+    self.assertDictEqual(result_dict, self.good_source_to_emma)
+
+  def testGetCoverageDictForFile(self):
+    line_coverage = self.line_coverage
+    self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage
+    self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'}
+    lines = self.lines_for_coverage
+    expected_dict = {
+      'absolute': {
+        'covered': 3.05,
+        'total': 5
+      },
+      'incremental': {
+        'covered': 2.05,
+        'total': 3
+      },
+      'source': [
+        {
+          'line': line_coverage[0].source,
+          'coverage': line_coverage[0].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[0].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[1].source,
+          'coverage': line_coverage[1].covered_status,
+          'changed': False,
+          'fractional_coverage': line_coverage[1].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[2].source,
+          'coverage': line_coverage[2].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[2].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[3].source,
+          'coverage': line_coverage[3].covered_status,
+          'changed': False,
+          'fractional_coverage': line_coverage[3].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[4].source,
+          'coverage': line_coverage[4].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[4].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[5].source,
+          'coverage': line_coverage[5].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[5].fractional_line_coverage,
+        }
+      ]
+    }
+    result_dict = self.simple_coverage.GetCoverageDictForFile(
+        '/fake/src', lines)
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDictForFile_emptyCoverage(self):
+    expected_dict = {
+      'absolute': {'covered': 0, 'total': 0},
+      'incremental': {'covered': 0, 'total': 0},
+      'source': []
+    }
+    self.simple_coverage._emma_parser.GetLineCoverage = lambda x: []
+    self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'}
+    result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {})
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDictForFile_missingCoverage(self):
+    self.simple_coverage._source_to_emma = {}
+    result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {})
+    self.assertIsNone(result_dict)
+
+  def testGetCoverageDict_basic(self):
+    files_for_coverage = {
+      '/path/to/1/File1.java': [1, 3, 4],
+      '/path/2/File2.java': [1, 2]
+    }
+    self.simple_coverage._source_to_emma = {
+      '/path/to/1/File1.java': 'emma_1',
+      '/path/2/File2.java': 'emma_2'
+    }
+    coverage_info = {
+      'emma_1': [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5),
+        emma_coverage_stats.LineCoverage(
+            3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+        emma_coverage_stats.LineCoverage(
+            4, '', emma_coverage_stats.COVERED, 1.0)
+      ],
+      'emma_2': [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.NOT_COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.COVERED, 1.0)
+      ]
+    }
+    expected_dict = {
+      'files': {
+        '/path/2/File2.java': {
+          'absolute': {'covered': 1, 'total': 2},
+          'incremental': {'covered': 1, 'total': 2},
+          'source': [{'changed': True, 'coverage': 0,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0}]
+        },
+        '/path/to/1/File1.java': {
+          'absolute': {'covered': 2.5, 'total': 3},
+          'incremental': {'covered': 2, 'total': 2},
+          'source': [{'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': False, 'coverage': 2,
+                      'line': '', 'fractional_coverage': 0.5},
+                     {'changed': True, 'coverage': -1,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0}]
+        }
+      },
+      'patch': {'incremental': {'covered': 3, 'total': 4}}
+    }
+    # Return the relevant coverage info for each file.
+    self.simple_coverage._emma_parser.GetLineCoverage = (
+        lambda x: coverage_info[x])
+    result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage)
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDict_noCoverage(self):
+    result_dict = self.simple_coverage.GetCoverageDict({})
+    self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT)
+
+
+class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase):
+  """Tests for GenerateCoverageReport."""
+
+  def testGenerateCoverageReport_missingJsonFile(self):
+    with self.assertRaises(IOError):
+      with mock.patch('os.path.exists', return_value=False):
+        emma_coverage_stats.GenerateCoverageReport('', '', '')
+
+  def testGenerateCoverageReport_invalidJsonFile(self):
+    with self.assertRaises(ValueError):
+      with mock.patch('os.path.exists', return_value=True):
+        MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''],
+                            line_coverage_file='', out_file_path='',
+                            coverage_dir='')
+
+
+def MockOpenForFunction(func, side_effects, **kwargs):
+  """Allows easy mock open and read for callables that open multiple files.
+
+  Mocks the Python open function so that each time read() is called on an
+  open file, the next element in |side_effects| is returned. This makes it
+  easier to test functions that call open() multiple times.
+
+  Args:
+    func: The callable to invoke once mock files are setup.
+    side_effects: A list of return values for each file to return once read.
+      Length of list should be equal to the number of calls to open in |func|.
+    **kwargs: Keyword arguments to be passed to |func|.
+
+  Returns:
+    A tuple containing the return value of |func| and the MagicMock object used
+      to mock all calls to open respectively.
+  """
+  mock_open = mock.mock_open()
+  mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value
+                           for side_effect in side_effects]
+  with mock.patch('__builtin__.open', mock_open):
+    return func(**kwargs), mock_open
+
+
+if __name__ == '__main__':
+  # Suppress logging messages.
+  unittest.main(buffer=True)
diff --git a/src/build/android/envsetup.sh b/src/build/android/envsetup.sh
new file mode 100755
index 0000000..7f549d9
--- /dev/null
+++ b/src/build/android/envsetup.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Adds Android SDK tools and related helpers to PATH, useful for development.
+# Not used on bots, nor required for any commands to succeed.
+# Use like: source build/android/envsetup.sh
+
+# Make sure we're being sourced.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+  local CHROME_SRC="$(readlink -f "${SCRIPT_DIR}/../../")"
+
+  # Some tools expect these environment variables.
+  export ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_sdk/public"
+  # ANDROID_HOME is deprecated, but generally means the same thing as
+  # ANDROID_SDK_ROOT and shouldn't hurt to set it.
+  export ANDROID_HOME="$ANDROID_SDK_ROOT"
+
+  # Set up PATH to point to SDK-provided (and other) tools, such as 'adb'.
+  export PATH=${CHROME_SRC}/build/android:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/tools/:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/platform-tools:$PATH
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
diff --git a/src/build/android/fast_local_dev_server.py b/src/build/android/fast_local_dev_server.py
new file mode 100755
index 0000000..a35c500
--- /dev/null
+++ b/src/build/android/fast_local_dev_server.py
@@ -0,0 +1,314 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an server to offload non-critical-path GN targets."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import queue
+import shutil
+import socket
+import subprocess
+import sys
+import threading
+from typing import Callable, Dict, List, Optional, Tuple
+
+sys.path.append(os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import server_utils
+
+
+def log(msg: str, *, end: str = ''):
+  # Shrink the message (keeping a 2-char prefix and using the rest of the
+  # room for the suffix) according to the terminal size so it always fits on
+  # one line.
+  width = shutil.get_terminal_size().columns
+  prefix = f'[{TaskStats.prefix()}] '
+  max_msg_width = width - len(prefix)
+  if len(msg) > max_msg_width:
+    length_to_show = max_msg_width - 5  # 2-char prefix plus 3-char ellipsis.
+    msg = f'{msg[:2]}...{msg[-length_to_show:]}'
+  # \r to return the carriage to the beginning of line.
+  # \033[K to replace the normal \n to erase until the end of the line.
+  # Avoid the default line ending so the next \r overwrites the same line just
+  #     like ninja's output.
+  print(f'\r{prefix}{msg}\033[K', end=end, flush=True)
+
+
+class TaskStats:
+  """Class to keep track of aggregate stats for all tasks across threads."""
+  _num_processes = 0
+  _completed_tasks = 0
+  _total_tasks = 0
+  _lock = threading.Lock()
+
+  @classmethod
+  def no_running_processes(cls):
+    return cls._num_processes == 0
+
+  @classmethod
+  def add_task(cls):
+    # Only the main thread calls this, so there is no need for locking.
+    cls._total_tasks += 1
+
+  @classmethod
+  def add_process(cls):
+    with cls._lock:
+      cls._num_processes += 1
+
+  @classmethod
+  def remove_process(cls):
+    with cls._lock:
+      cls._num_processes -= 1
+
+  @classmethod
+  def complete_task(cls):
+    with cls._lock:
+      cls._completed_tasks += 1
+
+  @classmethod
+  def prefix(cls):
+    # Ninja's prefix is: [205 processes, 6/734 @ 6.5/s : 0.922s ]
+    # Time taken and task completion rate are not important for the build server
+    # since it is always running in the background and uses idle priority for
+    # its tasks.
+    with cls._lock:
+      word = 'process' if cls._num_processes == 1 else 'processes'
+      return (f'{cls._num_processes} {word}, '
+              f'{cls._completed_tasks}/{cls._total_tasks}')
+
+
+class TaskManager:
+  """Class to encapsulate a threadsafe queue and handle deactivating it."""
+
+  def __init__(self):
+    self._queue: queue.SimpleQueue[Task] = queue.SimpleQueue()
+    self._deactivated = False
+
+  def add_task(self, task: Task):
+    assert not self._deactivated
+    TaskStats.add_task()
+    self._queue.put(task)
+    log(f'QUEUED {task.name}')
+    self._maybe_start_tasks()
+
+  def deactivate(self):
+    self._deactivated = True
+    while not self._queue.empty():
+      try:
+        task = self._queue.get_nowait()
+      except queue.Empty:
+        return
+      task.terminate()
+
+  @staticmethod
+  def _num_running_processes():
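+    # /proc/stat contains a line of the form "procs_running 2", giving the
+    # number of currently runnable tasks on the system.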
+    with open('/proc/stat') as f:
+      for line in f:
+        if line.startswith('procs_running'):
+          return int(line.rstrip().split()[1])
+    assert False, 'Could not read /proc/stat'
+
+  def _maybe_start_tasks(self):
+    if self._deactivated:
+      return
+    # Include load avg so that a small dip in the number of currently running
+    # processes will not cause new tasks to be started while the overall load is
+    # heavy.
+    cur_load = max(self._num_running_processes(), os.getloadavg()[0])
+    num_started = 0
+    # Always start a task if we don't have any running, so that all tasks are
+    # eventually finished. Try starting up tasks when the overall load is light.
+    # Limit to at most 2 new tasks to prevent ramping up too fast. There is a
+    # chance where multiple threads call _maybe_start_tasks and each gets to
+    # spawn up to 2 new tasks, but since the only downside is some build tasks
+    # get worked on earlier rather than later, it is not worth mitigating.
+    while num_started < 2 and (TaskStats.no_running_processes()
+                               or num_started + cur_load < os.cpu_count()):
+      try:
+        next_task = self._queue.get_nowait()
+      except queue.Empty:
+        return
+      num_started += next_task.start(self._maybe_start_tasks)
+
+
+# TODO(wnwen): Break this into Request (encapsulating what ninja sends) and Task
+#              when a Request starts to be run. This would eliminate ambiguity
+#              about when and whether _proc/_thread are initialized.
+class Task:
+  """Class to represent one task and operations on it."""
+
+  def __init__(self, name: str, cwd: str, cmd: List[str], stamp_file: str):
+    self.name = name
+    self.cwd = cwd
+    self.cmd = cmd
+    self.stamp_file = stamp_file
+    self._terminated = False
+    self._lock = threading.Lock()
+    self._proc: Optional[subprocess.Popen] = None
+    self._thread: Optional[threading.Thread] = None
+    self._return_code: Optional[int] = None
+
+  @property
+  def key(self):
+    return (self.cwd, self.name)
+
+  def start(self, on_complete_callback: Callable[[], None]) -> int:
+    """Starts the task if it has not already been terminated.
+
+    Returns the number of processes that have been started. This is called at
+    most once when the task is popped off the task queue."""
+
+    # The environment variable forces the script to actually run in order to
+    # avoid infinite recursion.
+    env = os.environ.copy()
+    env[server_utils.BUILD_SERVER_ENV_VARIABLE] = '1'
+
+    with self._lock:
+      if self._terminated:
+        return 0
+      # Use os.nice(19) to ensure the lowest priority (idle) for these analysis
+      # tasks since we want to avoid slowing down the actual build.
+      # TODO(wnwen): Use ionice to reduce resource consumption.
+      TaskStats.add_process()
+      log(f'STARTING {self.name}')
+      self._proc = subprocess.Popen(
+          self.cmd,
+          stdout=subprocess.PIPE,
+          stderr=subprocess.STDOUT,
+          cwd=self.cwd,
+          env=env,
+          text=True,
+          preexec_fn=lambda: os.nice(19),
+      )
+      self._thread = threading.Thread(
+          target=self._complete_when_process_finishes,
+          args=(on_complete_callback, ))
+      self._thread.start()
+      return 1
+
+  def terminate(self):
+    """Can be called multiple times to cancel and ignore the task's output."""
+
+    with self._lock:
+      if self._terminated:
+        return
+      self._terminated = True
+    # It is safe to access _proc and _thread outside of _lock since they are
+    # only changed by self.start holding _lock while self._terminated is
+    # false. Since we have just set self._terminated to true inside of _lock,
+    # we know that neither _proc nor _thread will be changed from this point
+    # onwards.
+    if self._proc:
+      self._proc.terminate()
+      self._proc.wait()
+    # Ensure that self._complete is called either by the thread or by us.
+    if self._thread:
+      self._thread.join()
+    else:
+      self._complete()
+
+  def _complete_when_process_finishes(self,
+                                      on_complete_callback: Callable[[], None]):
+    assert self._proc
+    # We know Popen.communicate will return a str and not a byte since it is
+    # constructed with text=True.
+    stdout: str = self._proc.communicate()[0]
+    self._return_code = self._proc.returncode
+    TaskStats.remove_process()
+    self._complete(stdout)
+    on_complete_callback()
+
+  def _complete(self, stdout: str = ''):
+    """Update the user and ninja after the task has run or been terminated.
+
+    This method should only be run once per task. Avoid modifying the task so
+    that this method does not need locking."""
+
+    TaskStats.complete_task()
+    failed = False
+    if self._terminated:
+      log(f'TERMINATED {self.name}')
+      # Ignore stdout as it is now outdated.
+      failed = True
+    else:
+      log(f'FINISHED {self.name}')
+      if stdout or self._return_code != 0:
+        failed = True
+        # An extra new line is needed since we want to preserve the previous
+        # _log line. Use a single print so that it is threadsafe.
+        # TODO(wnwen): Improve stdout display by parsing over it and moving the
+        #              actual error to the bottom. Otherwise long command lines
+        #              in the Traceback section obscure the actual error(s).
+        print('\n' + '\n'.join([
+            f'FAILED: {self.name}',
+            f'Return code: {self._return_code}',
+            ' '.join(self.cmd),
+            stdout,
+        ]))
+
+    if failed:
+      # Force ninja to consider failed targets as dirty.
+      try:
+        os.unlink(os.path.join(self.cwd, self.stamp_file))
+      except FileNotFoundError:
+        pass
+    else:
+      # Ninja will rebuild targets when their inputs change even if their stamp
+      # file has a later modified time. Thus we do not need to worry about the
+      # script being run by the build server updating the mtime incorrectly.
+      pass
+
+
+def _listen_for_request_data(sock: socket.socket):
+  while True:
+    conn = sock.accept()[0]
+    received = []
+    with conn:
+      while True:
+        data = conn.recv(4096)
+        if not data:
+          break
+        received.append(data)
+    if received:
+      yield json.loads(b''.join(received))
+
+
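+# Each request is a JSON object whose fields feed Task() below, e.g.
+# (illustrative values):
+#   {"name": "target_name", "cwd": "out/Debug", "cmd": ["script.py"],
+#    "stamp_file": "obj/target.stamp"}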
+def _process_requests(sock: socket.socket):
+  # Since dicts in python can contain anything, explicitly type tasks to help
+  # make static type checking more useful.
+  tasks: Dict[Tuple[str, str], Task] = {}
+  task_manager = TaskManager()
+  try:
+    for data in _listen_for_request_data(sock):
+      task = Task(name=data['name'],
+                  cwd=data['cwd'],
+                  cmd=data['cmd'],
+                  stamp_file=data['stamp_file'])
+      existing_task = tasks.get(task.key)
+      if existing_task:
+        existing_task.terminate()
+      tasks[task.key] = task
+      task_manager.add_task(task)
+  except KeyboardInterrupt:
+    log('STOPPING SERVER...', end='\n')
+    # Gracefully shut down the task manager, terminating all queued tasks.
+    task_manager.deactivate()
+    # Terminate all currently running tasks.
+    for task in tasks.values():
+      task.terminate()
+    log('STOPPED', end='\n')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.parse_args()
+  with socket.socket(socket.AF_UNIX) as sock:
+    sock.bind(server_utils.SOCKET_ADDRESS)
+    sock.listen()
+    _process_requests(sock)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/generate_jacoco_report.py b/src/build/android/generate_jacoco_report.py
new file mode 100755
index 0000000..d0a9987
--- /dev/null
+++ b/src/build/android/generate_jacoco_report.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env vpython
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates Jacoco coverage files to produce output."""
+
+from __future__ import print_function
+
+import argparse
+import fnmatch
+import json
+import os
+import sys
+
+import devil_chromium
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+
+# Source paths should be passed to Jacoco in a way that the relative file paths
+# reflect the class package name.
+_PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium']
+
+# The sources_json_file is generated by jacoco_instr.py with source directories
+# and input path to non-instrumented jars.
+# e.g.
+# 'source_dirs': [
+#   "chrome/android/java/src/org/chromium/chrome/browser/toolbar/bottom",
+#   "chrome/android/java/src/org/chromium/chrome/browser/ui/system",
+# ...]
+# 'input_path':
+#   '$CHROMIUM_OUTPUT_DIR/\
+#    obj/chrome/android/features/tab_ui/java__process_prebuilt-filtered.jar'
+
+_SOURCES_JSON_FILES_SUFFIX = '__jacoco_sources.json'
+
+# These should match the jar class files generated in internal_rules.gni
+_DEVICE_CLASS_EXCLUDE_SUFFIX = 'host_filter.jar'
+_HOST_CLASS_EXCLUDE_SUFFIX = 'device_filter.jar'
+
+
+def _CreateClassfileArgs(class_files, exclude_suffix=None):
+  """Returns a list of files that don't have a given suffix.
+
+  Args:
+    class_files: A list of class files.
+    exclude_suffix: Suffix to look for to exclude.
+
+  Returns:
+    A list of alternating '--classfiles' flags and paths for files that
+      don't use the suffix.
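+
+  For example (illustrative names), with exclude_suffix='host_filter.jar',
+  ['a.jar', 'b.host_filter.jar'] yields ['--classfiles', 'a.jar'].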
+  """
+  result_class_files = []
+  for f in class_files:
+    if exclude_suffix:
+      if not f.endswith(exclude_suffix):
+        result_class_files += ['--classfiles', f]
+    else:
+      result_class_files += ['--classfiles', f]
+
+  return result_class_files
+
+
+def _GenerateReportOutputArgs(args, class_files, report_type):
+  class_jar_exclude = None
+  if report_type == 'device':
+    class_jar_exclude = _DEVICE_CLASS_EXCLUDE_SUFFIX
+  elif report_type == 'host':
+    class_jar_exclude = _HOST_CLASS_EXCLUDE_SUFFIX
+
+  cmd = _CreateClassfileArgs(class_files, class_jar_exclude)
+  if args.format == 'html':
+    report_dir = os.path.join(args.output_dir, report_type)
+    if not os.path.exists(report_dir):
+      os.makedirs(report_dir)
+    cmd += ['--html', report_dir]
+  elif args.format == 'xml':
+    cmd += ['--xml', args.output_file]
+  elif args.format == 'csv':
+    cmd += ['--csv', args.output_file]
+
+  return cmd
+
+
+def _GetFilesWithSuffix(root_dir, suffix):
+  """Gets all files with a given suffix.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    suffix: Suffix to look for.
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*' + suffix)
+    files.extend([os.path.join(root, basename) for basename in basenames])
+
+  return files
+
+
+def _GetExecFiles(root_dir, exclude_substr=None):
+  """ Gets all .exec files
+
+  Args:
+    root_dir: Root directory in which to search for files.
+    exclude_substr: Substring which should be absent in filename. If None, all
+      files are selected.
+
+  Returns:
+    A list of absolute paths to .exec files.
+  """
+  all_exec_files = _GetFilesWithSuffix(root_dir, ".exec")
+  valid_exec_files = []
+  for exec_file in all_exec_files:
+    if not exclude_substr or exclude_substr not in exec_file:
+      valid_exec_files.append(exec_file)
+  return valid_exec_files
+
+
+def _ParseArguments(parser):
+  """Parses the command line arguments.
+
+  Args:
+    parser: ArgumentParser object.
+
+  Returns:
+    The parsed arguments.
+  """
+  parser.add_argument(
+      '--format',
+      required=True,
+      choices=['html', 'xml', 'csv'],
+      help='Output report format. Choose one from html, xml and csv.')
+  parser.add_argument(
+      '--device-or-host',
+      choices=['device', 'host'],
+      help='Selects whether to use the device classpath files or the host '
+      'classpath files. Host is typically used for junit tests and device '
+      'for tests that run on the device. Only used for xml and csv '
+      'reports.')
+  parser.add_argument('--output-dir', help='html report output directory.')
+  parser.add_argument('--output-file',
+                      help='xml file to write device coverage results.')
+  parser.add_argument(
+      '--coverage-dir',
+      required=True,
+      help='Root of the directory in which to search for '
+      'coverage data (.exec) files.')
+  parser.add_argument('--exec-filename-excludes',
+                      required=False,
+                      help='Excludes .exec files which contain a particular '
+                      'substring in their name')
+  parser.add_argument(
+      '--sources-json-dir',
+      help='Root of the directory in which to search for '
+      '*__jacoco_sources.json files.')
+  parser.add_argument(
+      '--class-files',
+      nargs='+',
+      help='Location of Java non-instrumented class files. '
+      'Use non-instrumented jars instead of instrumented jars. '
+      'e.g. use chrome_java__process_prebuilt_(host/device)_filter.jar '
+      'instead of chrome_java__process_prebuilt-instrumented.jar')
+  parser.add_argument(
+      '--sources',
+      nargs='+',
+      help='Location of the source files. '
+      'Specified source folders must be the direct parent of the folders '
+      'that define the Java packages. '
+      'e.g. <src_dir>/chrome/android/java/src/')
+  parser.add_argument(
+      '--cleanup',
+      action='store_true',
+      help='If set, removes coverage files generated at '
+      'runtime.')
+  args = parser.parse_args()
+
+  if args.format == 'html' and not args.output_dir:
+    parser.error('--output-dir needed for html report.')
+  if args.format in ('csv', 'xml'):
+    if not args.output_file:
+      parser.error('--output-file needed for xml/csv reports.')
+    if not args.device_or_host and args.sources_json_dir:
+      parser.error('--device-or-host selection needed with --sources-json-dir')
+  if not (args.sources_json_dir or args.class_files):
+    parser.error('At least either --sources-json-dir or --class-files needed.')
+  return args
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  args = _ParseArguments(parser)
+
+  devil_chromium.Initialize()
+
+  coverage_files = _GetExecFiles(args.coverage_dir, args.exec_filename_excludes)
+  if not coverage_files:
+    parser.error('No coverage file found under %s' % args.coverage_dir)
+  print('Found coverage files: %s' % str(coverage_files))
+
+  class_files = []
+  source_dirs = []
+  if args.sources_json_dir:
+    sources_json_files = _GetFilesWithSuffix(args.sources_json_dir,
+                                             _SOURCES_JSON_FILES_SUFFIX)
+    for f in sources_json_files:
+      with open(f, 'r') as json_file:
+        data = json.load(json_file)
+        class_files.extend(data['input_path'])
+        source_dirs.extend(data['source_dirs'])
+
+  # Fix source directories as direct parent of Java packages.
+  fixed_source_dirs = set()
+  for path in source_dirs:
+    for partial in _PARTIAL_PACKAGE_NAMES:
+      if partial in path:
+        fixed_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                 path[:path.index(partial)])
+        fixed_source_dirs.add(fixed_dir)
+        break
+
+  if args.class_files:
+    class_files += args.class_files
+  if args.sources:
+    fixed_source_dirs.update(args.sources)
+
+  cmd = [
+      'java', '-jar',
+      os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'jacoco', 'lib',
+                   'jacococli.jar'), 'report'
+  ] + coverage_files
+
+  for source in fixed_source_dirs:
+    cmd += ['--sourcefiles', source]
+
+  if args.format == 'html':
+    # Both device and host reports are generated for html, since the cq bot
+    # generates an html report and we wouldn't know which one a developer
+    # needs.
+    device_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'device')
+    host_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'host')
+    device_exit_code = cmd_helper.RunCmd(device_cmd)
+    host_exit_code = cmd_helper.RunCmd(host_cmd)
+    exit_code = device_exit_code or host_exit_code
+  else:
+    cmd = cmd + _GenerateReportOutputArgs(args, class_files,
+                                          args.device_or_host)
+    exit_code = cmd_helper.RunCmd(cmd)
+
+  if args.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  # Command tends to exit with status 0 when it actually failed.
+  if not exit_code:
+    if args.format == 'html':
+      if not os.path.isdir(args.output_dir) or not os.listdir(args.output_dir):
+        print('No report generated at %s' % args.output_dir)
+        exit_code = 1
+    elif not os.path.isfile(args.output_file):
+      print('No device coverage report generated at %s' % args.output_file)
+      exit_code = 1
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gradle/AndroidManifest.xml b/src/build/android/gradle/AndroidManifest.xml
new file mode 100644
index 0000000..f3e50e0
--- /dev/null
+++ b/src/build/android/gradle/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright 2018 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by Android Studio's _all target.
+  No <uses-sdk> is allowed due to https://crbug.com/841529.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.dummy">
+</manifest>
diff --git a/src/build/android/gradle/android.jinja b/src/build/android/gradle/android.jinja
new file mode 100644
index 0000000..40d4506
--- /dev/null
+++ b/src/build/android/gradle/android.jinja
@@ -0,0 +1,114 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_sourceset(variables, prefix) %}
+{% if variables is defined %}
+        {{ prefix }} {
+{% if variables.android_manifest is defined %}
+            manifest.srcFile "{{ variables.android_manifest }}"
+{% endif %}
+{% if variables.java_dirs is defined %}
+            java.srcDirs = [
+{% for path in variables.java_dirs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+{% if variables.java_excludes is defined %}
+            java.filter.exclude([
+{% for path in variables.java_excludes %}
+                "{{ path }}",
+{% endfor %}
+            ])
+{% endif %}
+{% if variables.jni_libs is defined %}
+            jniLibs.srcDirs = [
+{% for path in variables.jni_libs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+{% if variables.res_dirs is defined %}
+            res.srcDirs = [
+{% for path in variables.res_dirs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+        }
+{% endif %}
+{% endmacro %}
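+{# Illustrative rendering only (hypothetical path): with java_dirs set, the #}
+{# macro above expands roughly to: #}
+{#     main { #}
+{#         java.srcDirs = [ #}
+{#             "../../base/android/java/src", #}
+{#         ] #}
+{#     } #}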
+// Generated by //build/android/generate_gradle.py
+
+{% if template_type in ('android_library', 'android_junit') %}
+apply plugin: "com.android.library"
+{% elif template_type == 'android_apk' %}
+apply plugin: "com.android.application"
+{% endif %}
+
+android {
+    compileSdkVersion "{{ compile_sdk_version }}"
+
+    defaultConfig {
+        vectorDrawables.useSupportLibrary = true
+        minSdkVersion 21
+        targetSdkVersion {{ target_sdk_version }}
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
+
+{% if native is defined %}
+    externalNativeBuild {
+        cmake {
+            path "CMakeLists.txt"
+        }
+    }
+{% endif %}
+
+    sourceSets {
+{% for name in ['main', 'test', 'androidTest', 'debug', 'release'] %}
+        {{ name }} {
+            aidl.srcDirs = []
+            assets.srcDirs = []
+            java.srcDirs = []
+            jni.srcDirs = []
+            renderscript.srcDirs = []
+            res.srcDirs = []
+            resources.srcDirs = []
+        }
+{% endfor %}
+
+{{ expand_sourceset(main, 'main') }}
+{{ expand_sourceset(test, 'test') }}
+{% if android_test is defined %}
+{% for t in android_test %}
+{{ expand_sourceset(t, 'androidTest') }}
+{% endfor %}
+{% endif %}
+    }
+}
+
+{% include 'dependencies.jinja' %}
+
+afterEvaluate {
+    def tasksToDisable = tasks.findAll {
+        return (it.name.equals('generateDebugSources')  // causes unwanted AndroidManifest.java
+                || it.name.equals('generateReleaseSources')
+                || it.name.endsWith('BuildConfig')  // causes unwanted BuildConfig.java
+                || it.name.equals('preDebugAndroidTestBuild')
+{% if not use_gradle_process_resources %}
+                || it.name.endsWith('Assets')
+                || it.name.endsWith('Resources')
+                || it.name.endsWith('ResValues')
+{% endif %}
+                || it.name.endsWith('Aidl')
+                || it.name.endsWith('Renderscript')
+                || it.name.endsWith('Shaders'))
+    }
+    tasksToDisable.each { Task task ->
+      task.enabled = false
+    }
+}
diff --git a/src/build/android/gradle/cmake.jinja b/src/build/android/gradle/cmake.jinja
new file mode 100644
index 0000000..b727388
--- /dev/null
+++ b/src/build/android/gradle/cmake.jinja
@@ -0,0 +1,25 @@
+{# Copyright 2018 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+# Generated by //build/android/generate_gradle.py
+
+cmake_minimum_required(VERSION 3.4.1)
+
+project(chrome C CXX)
+
+{% if native.includes is defined %}
+include_directories(
+{% for path in native.includes %}
+    {{ path }}
+{% endfor %}
+)
+{% endif %}
+
+# Android studio will index faster when adding all sources into one library.
+{% if native.sources is defined %}
+add_library("chromium"
+{% for path in native.sources %}
+    {{ path }}
+{% endfor %}
+)
+{% endif %}
diff --git a/src/build/android/gradle/dependencies.jinja b/src/build/android/gradle/dependencies.jinja
new file mode 100644
index 0000000..87bc312
--- /dev/null
+++ b/src/build/android/gradle/dependencies.jinja
@@ -0,0 +1,28 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_deps(variables, prefix) %}
+{% if variables is defined %}
+{% if variables.prebuilts is defined %}
+{% for path in variables.prebuilts %}
+    {{ prefix }} files("{{ path }}")
+{% endfor %}
+{% endif %}
+{% if variables.java_project_deps is defined %}
+{% for proj in variables.java_project_deps %}
+    {{ prefix }} project(":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% if variables.android_project_deps is defined %}
+{% for proj in variables.android_project_deps %}
+    {{ prefix }} project(path: ":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% endif %}
+{% endmacro %}
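+{# Illustrative rendering only (hypothetical jar/project names): a prebuilt #}
+{# expands to: implementation files("../../lib.java/foo.jar") #}
+{# and a java project dep to: implementation project(":base.base_java") #}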
+
+dependencies {
+{{ expand_deps(main, 'implementation') }}
+{{ expand_deps(test, 'testImplementation') }}
+{{ expand_deps(android_test, 'androidTestImplementation') }}
+}
diff --git a/src/build/android/gradle/generate_gradle.py b/src/build/android/gradle/generate_gradle.py
new file mode 100755
index 0000000..80d0b0a
--- /dev/null
+++ b/src/build/android/gradle/generate_gradle.py
@@ -0,0 +1,932 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an Android Studio project from a GN target."""
+
+import argparse
+import codecs
+import collections
+import glob
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+import devil_chromium
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+import jinja_template
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.dirname(_BUILD_ANDROID))
+import gn_helpers
+
+_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
+                                 'depot_tools')
+_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gradle',
+    'AndroidManifest.xml')
+_FILE_DIR = os.path.dirname(__file__)
+_GENERATED_JAVA_SUBDIR = 'generated_java'
+_JNI_LIBS_SUBDIR = 'symlinked-libs'
+_ARMEABI_SUBDIR = 'armeabi'
+_GRADLE_BUILD_FILE = 'build.gradle'
+_CMAKE_FILE = 'CMakeLists.txt'
+# This needs to come first alphabetically among all modules.
+_MODULE_ALL = '_all'
+_SRC_INTERNAL = os.path.join(
+    os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal')
+_INSTRUMENTATION_TARGET_SUFFIX = '_test_apk__test_apk__apk'
+
+_DEFAULT_TARGETS = [
+    '//android_webview/test/embedded_test_server:aw_net_test_support_apk',
+    '//android_webview/test:webview_instrumentation_apk',
+    '//android_webview/test:webview_instrumentation_test_apk',
+    '//base:base_junit_tests',
+    '//chrome/android:chrome_junit_tests',
+    '//chrome/android:chrome_public_apk',
+    '//chrome/android:chrome_public_test_apk',
+    '//content/public/android:content_junit_tests',
+    '//content/shell/android:content_shell_apk',
+    # Below must be included even with --all since they are libraries.
+    '//base/android/jni_generator:jni_processor',
+    '//tools/android/errorprone_plugin:errorprone_plugin_java',
+]
+
+_EXCLUDED_PREBUILT_JARS = [
+    # Android Studio already provides Desugar runtime.
+    # Including it would cause linking error because of a duplicate class.
+    'lib.java/third_party/bazel/desugar/Desugar-runtime.jar'
+]
+
+
+def _TemplatePath(name):
+  return os.path.join(_FILE_DIR, '{}.jinja'.format(name))
+
+
+def _RebasePath(path_or_list, new_cwd=None, old_cwd=None):
+  """Makes the given path(s) relative to new_cwd, or absolute if not specified.
+
+  If new_cwd is not specified, absolute paths are returned.
+  If old_cwd is not specified, constants.GetOutDirectory() is assumed.
+  """
+  if path_or_list is None:
+    return []
+  if not isinstance(path_or_list, basestring):
+    return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list]
+  if old_cwd is None:
+    old_cwd = constants.GetOutDirectory()
+  old_cwd = os.path.abspath(old_cwd)
+  if new_cwd:
+    new_cwd = os.path.abspath(new_cwd)
+    return os.path.relpath(os.path.join(old_cwd, path_or_list), new_cwd)
+  return os.path.abspath(os.path.join(old_cwd, path_or_list))
+
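+# Illustrative only (hypothetical paths): with old_cwd defaulting to the GN
+# output directory out/Debug, _RebasePath('gen/foo.jar', 'out/Debug/gradle')
+# yields '../gen/foo.jar', while _RebasePath('gen/foo.jar') yields the
+# absolute path of out/Debug/gen/foo.jar.
+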
+
+def _IsSubpathOf(child, parent):
+  """Returns whether |child| is a subpath of |parent|."""
+  return not os.path.relpath(child, parent).startswith(os.pardir)
+
+
+def _WriteFile(path, data):
+  """Writes |data| to |path|, constucting parent directories if necessary."""
+  logging.info('Writing %s', path)
+  dirname = os.path.dirname(path)
+  if not os.path.exists(dirname):
+    os.makedirs(dirname)
+  with codecs.open(path, 'w', 'utf-8') as output_file:
+    output_file.write(data)
+
+
+def _RunGnGen(output_dir, args=None):
+  cmd = [os.path.join(_DEPOT_TOOLS_PATH, 'gn'), 'gen', output_dir]
+  if args:
+    cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _RunNinja(output_dir, args):
+  # Don't use version within _DEPOT_TOOLS_PATH, since most devs don't use
+  # that one when building.
+  cmd = ['autoninja', '-C', output_dir]
+  cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _QueryForAllGnTargets(output_dir):
+  cmd = [
+      os.path.join(_BUILD_ANDROID, 'list_java_targets.py'), '--gn-labels',
+      '--nested', '--build', '--output-directory', output_dir
+  ]
+  logging.info('Running: %r', cmd)
+  return subprocess.check_output(cmd).splitlines()
+
+
+class _ProjectEntry(object):
+  """Helper class for project entries."""
+
+  _cached_entries = {}
+
+  def __init__(self, gn_target):
+    # Use _ProjectEntry.FromGnTarget instead for caching.
+    self._gn_target = gn_target
+    self._build_config = None
+    self._java_files = None
+    self._all_entries = None
+    self.android_test_entries = []
+
+  @classmethod
+  def FromGnTarget(cls, gn_target):
+    assert gn_target.startswith('//'), gn_target
+    if ':' not in gn_target:
+      gn_target = '%s:%s' % (gn_target, os.path.basename(gn_target))
+    if gn_target not in cls._cached_entries:
+      cls._cached_entries[gn_target] = cls(gn_target)
+    return cls._cached_entries[gn_target]
+
+  @classmethod
+  def FromBuildConfigPath(cls, path):
+    prefix = 'gen/'
+    suffix = '.build_config'
+    assert path.startswith(prefix) and path.endswith(suffix), path
+    subdir = path[len(prefix):-len(suffix)]
+    gn_target = '//%s:%s' % (os.path.split(subdir))
+    return cls.FromGnTarget(gn_target)
+
+  def __hash__(self):
+    return hash(self._gn_target)
+
+  def __eq__(self, other):
+    return self._gn_target == other.GnTarget()
+
+  def GnTarget(self):
+    return self._gn_target
+
+  def NinjaTarget(self):
+    return self._gn_target[2:]
+
+  def GnBuildConfigTarget(self):
+    return '%s__build_config_crbug_908819' % self._gn_target
+
+  def GradleSubdir(self):
+    """Returns the output subdirectory."""
+    ninja_target = self.NinjaTarget()
+    # Support targets at the root level. e.g. //:foo
+    if ninja_target[0] == ':':
+      ninja_target = ninja_target[1:]
+    return ninja_target.replace(':', os.path.sep)
+
+  def GeneratedJavaSubdir(self):
+    return _RebasePath(
+        os.path.join('gen', self.GradleSubdir(), _GENERATED_JAVA_SUBDIR))
+
+  def ProjectName(self):
+    """Returns the Gradle project name."""
+    return self.GradleSubdir().replace(os.path.sep, '.')
+
+  def BuildConfig(self):
+    """Reads and returns the project's .build_config JSON."""
+    if not self._build_config:
+      path = os.path.join('gen', self.GradleSubdir() + '.build_config')
+      with open(_RebasePath(path)) as jsonfile:
+        self._build_config = json.load(jsonfile)
+    return self._build_config
+
+  def DepsInfo(self):
+    return self.BuildConfig()['deps_info']
+
+  def Gradle(self):
+    return self.BuildConfig()['gradle']
+
+  def Javac(self):
+    return self.BuildConfig()['javac']
+
+  def GetType(self):
+    """Returns the target type from its .build_config."""
+    return self.DepsInfo()['type']
+
+  def IsValid(self):
+    return self.GetType() in (
+        'android_apk',
+        'android_app_bundle_module',
+        'java_library',
+        "java_annotation_processor",
+        'java_binary',
+        'junit_binary',
+    )
+
+  def ResSources(self):
+    return self.DepsInfo().get('lint_resource_sources', [])
+
+  def JavaFiles(self):
+    if self._java_files is None:
+      java_sources_file = self.DepsInfo().get('java_sources_file')
+      java_files = []
+      if java_sources_file:
+        java_sources_file = _RebasePath(java_sources_file)
+        java_files = build_utils.ReadSourcesList(java_sources_file)
+      self._java_files = java_files
+    return self._java_files
+
+  def PrebuiltJars(self):
+    all_jars = self.Gradle().get('dependent_prebuilt_jars', [])
+    return [i for i in all_jars if i not in _EXCLUDED_PREBUILT_JARS]
+
+  def AllEntries(self):
+    """Returns a list of all entries that the current entry depends on.
+
+    This includes the entry itself to make iterating simpler."""
+    if self._all_entries is None:
+      logging.debug('Generating entries for %s', self.GnTarget())
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+          for p in self.Gradle()['dependent_android_projects']]
+      deps.extend(_ProjectEntry.FromBuildConfigPath(p)
+          for p in self.Gradle()['dependent_java_projects'])
+      all_entries = set()
+      for dep in deps:
+        all_entries.update(dep.AllEntries())
+      all_entries.add(self)
+      self._all_entries = list(all_entries)
+    return self._all_entries
+
+
+class _ProjectContextGenerator(object):
+  """Helper class to generate gradle build files"""
+  def __init__(self, project_dir, build_vars, use_gradle_process_resources,
+               jinja_processor, split_projects, channel):
+    self.project_dir = project_dir
+    self.build_vars = build_vars
+    self.use_gradle_process_resources = use_gradle_process_resources
+    self.jinja_processor = jinja_processor
+    self.split_projects = split_projects
+    self.channel = channel
+    self.processed_java_dirs = set()
+    self.processed_prebuilts = set()
+    self.processed_res_dirs = set()
+
+  def _GenJniLibs(self, root_entry):
+    libraries = []
+    for entry in self._GetEntries(root_entry):
+      libraries += entry.BuildConfig().get('native', {}).get('libraries', [])
+    if libraries:
+      return _CreateJniLibsDir(constants.GetOutDirectory(),
+          self.EntryOutputDir(root_entry), libraries)
+    return []
+
+  def _GenJavaDirs(self, root_entry):
+    java_files = []
+    for entry in self._GetEntries(root_entry):
+      java_files += entry.JavaFiles()
+    java_dirs, excludes = _ComputeJavaSourceDirsAndExcludes(
+        constants.GetOutDirectory(), java_files)
+    return java_dirs, excludes
+
+  def _GenCustomManifest(self, entry):
+    """Returns the path to the generated AndroidManifest.xml.
+
+    Gradle uses package id from manifest when generating R.class. So, we need
+    to generate a custom manifest if we let gradle process resources. We cannot
+    simply set android.defaultConfig.applicationId because it is not supported
+    for library targets."""
+    resource_packages = entry.Javac().get('resource_packages')
+    if not resource_packages:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+          'unknown package. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+    elif len(resource_packages) > 1:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+          'multiple packages. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+
+    variables = {'package': resource_packages[0]}
+    data = self.jinja_processor.Render(_TemplatePath('manifest'), variables)
+    output_file = os.path.join(
+        self.EntryOutputDir(entry), 'AndroidManifest.xml')
+    _WriteFile(output_file, data)
+
+    return output_file
+
+  def _Relativize(self, entry, paths):
+    return _RebasePath(paths, self.EntryOutputDir(entry))
+
+  def _GetEntries(self, entry):
+    if self.split_projects:
+      return [entry]
+    return entry.AllEntries()
+
+  def EntryOutputDir(self, entry):
+    return os.path.join(self.project_dir, entry.GradleSubdir())
+
+  def GeneratedInputs(self, root_entry):
+    generated_inputs = set()
+    for entry in self._GetEntries(root_entry):
+      generated_inputs.update(entry.PrebuiltJars())
+    return generated_inputs
+
+  def GenerateManifest(self, root_entry):
+    android_manifest = root_entry.DepsInfo().get('android_manifest')
+    if not android_manifest:
+      android_manifest = self._GenCustomManifest(root_entry)
+    return self._Relativize(root_entry, android_manifest)
+
+  def Generate(self, root_entry):
+    # TODO(agrieve): Add an option to use interface jars and see if that speeds
+    # things up at all.
+    variables = {}
+    java_dirs, excludes = self._GenJavaDirs(root_entry)
+    java_dirs.extend(
+        e.GeneratedJavaSubdir() for e in self._GetEntries(root_entry))
+    self.processed_java_dirs.update(java_dirs)
+    java_dirs.sort()
+    variables['java_dirs'] = self._Relativize(root_entry, java_dirs)
+    variables['java_excludes'] = excludes
+    variables['jni_libs'] = self._Relativize(
+        root_entry, set(self._GenJniLibs(root_entry)))
+    prebuilts = set(
+        p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars())
+    self.processed_prebuilts.update(prebuilts)
+    variables['prebuilts'] = self._Relativize(root_entry, prebuilts)
+    res_sources_files = _RebasePath(
+        set(p for e in self._GetEntries(root_entry) for p in e.ResSources()))
+    res_sources = []
+    for res_sources_file in res_sources_files:
+      res_sources.extend(build_utils.ReadSourcesList(res_sources_file))
+    res_dirs = resource_utils.DeduceResourceDirsFromFileList(res_sources)
+    # Do not add generated resources for the all module since it creates many
+    # duplicates, and currently resources are only used for editing.
+    self.processed_res_dirs.update(res_dirs)
+    variables['res_dirs'] = self._Relativize(root_entry, res_dirs)
+    if self.split_projects:
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_android_projects']]
+      variables['android_project_deps'] = [d.ProjectName() for d in deps]
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_java_projects']]
+      variables['java_project_deps'] = [d.ProjectName() for d in deps]
+    return variables
+
+
+def _ComputeJavaSourceDirs(java_files):
+  """Returns a dictionary of source dirs with each given files in one."""
+  found_roots = {}
+  for path in java_files:
+    path_root = path
+    # Recognize these tokens as top-level.
+    while True:
+      path_root = os.path.dirname(path_root)
+      basename = os.path.basename(path_root)
+      assert basename, 'Failed to find source dir for ' + path
+      if basename in ('java', 'src'):
+        break
+      if basename in ('javax', 'org', 'com'):
+        path_root = os.path.dirname(path_root)
+        break
+    if path_root not in found_roots:
+      found_roots[path_root] = []
+    found_roots[path_root].append(path)
+  return found_roots
+
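+# For example, 'chrome/android/java/src/org/chromium/Foo.java' resolves to the
+# source root 'chrome/android/java/src': the walk stops one directory above
+# the first 'javax'/'org'/'com' component, or at a 'java'/'src' component.
+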
+
+def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir):
+  """Returns exclude patters to exclude unwanted files but keep wanted files.
+
+  - Shortens exclude list by globbing if possible.
+  - Exclude patterns are relative paths from the parent directory.
+  """
+  excludes = []
+  files_to_include = set(wanted_files)
+  files_to_exclude = set(unwanted_files)
+  while files_to_exclude:
+    unwanted_file = files_to_exclude.pop()
+    target_exclude = os.path.join(
+        os.path.dirname(unwanted_file), '*.java')
+    found_files = set(glob.glob(target_exclude))
+    valid_files = found_files & files_to_include
+    if valid_files:
+      excludes.append(os.path.relpath(unwanted_file, parent_dir))
+    else:
+      excludes.append(os.path.relpath(target_exclude, parent_dir))
+      files_to_exclude -= found_files
+  return excludes
+
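+# Sketch with hypothetical files: excluding 'a/Gen.java' while keeping
+# 'a/Keep.java' emits the single-file pattern 'a/Gen.java' (relative to
+# parent_dir); if no wanted file shares the directory, the whole glob
+# 'a/*.java' is emitted instead.
+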
+
+def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files):
+  """Computes the list of java source directories and exclude patterns.
+
+  1. Computes the root java source directories from the list of files.
+  2. Computes exclude patterns that exclude only the extra files.
+  3. Returns the list of java source directories and exclude patterns.
+  """
+  java_dirs = []
+  excludes = []
+  if java_files:
+    java_files = _RebasePath(java_files)
+    computed_dirs = _ComputeJavaSourceDirs(java_files)
+    java_dirs = computed_dirs.keys()
+    all_found_java_files = set()
+
+    for directory, files in computed_dirs.iteritems():
+      found_java_files = build_utils.FindInDirectory(directory, '*.java')
+      all_found_java_files.update(found_java_files)
+      unwanted_java_files = set(found_java_files) - set(files)
+      if unwanted_java_files:
+        logging.debug('Directory requires excludes: %s', directory)
+        excludes.extend(
+            _ComputeExcludeFilters(files, unwanted_java_files, directory))
+
+    missing_java_files = set(java_files) - all_found_java_files
+    # Warn only about non-generated files that are missing.
+    missing_java_files = [p for p in missing_java_files
+                          if not p.startswith(output_dir)]
+    if missing_java_files:
+      logging.warning(
+          'Some java files were not found: %s', missing_java_files)
+
+  return java_dirs, excludes
+
+
+def _CreateRelativeSymlink(target_path, link_path):
+  link_dir = os.path.dirname(link_path)
+  relpath = os.path.relpath(target_path, link_dir)
+  logging.debug('Creating symlink %s -> %s', link_path, relpath)
+  os.symlink(relpath, link_path)
+
+
+def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):
+  """Creates directory with symlinked .so files if necessary.
+
+  Returns list of JNI libs directories."""
+
+  if so_files:
+    symlink_dir = os.path.join(entry_output_dir, _JNI_LIBS_SUBDIR)
+    shutil.rmtree(symlink_dir, True)
+    abi_dir = os.path.join(symlink_dir, _ARMEABI_SUBDIR)
+    if not os.path.exists(abi_dir):
+      os.makedirs(abi_dir)
+    for so_file in so_files:
+      target_path = os.path.join(output_dir, so_file)
+      symlinked_path = os.path.join(abi_dir, so_file)
+      _CreateRelativeSymlink(target_path, symlinked_path)
+
+    return [symlink_dir]
+
+  return []
+
+
+def _GenerateLocalProperties(sdk_dir):
+  """Returns the data for local.properties as a string."""
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      'sdk.dir=%s' % sdk_dir,
+      '',
+  ])
+
+
+def _GenerateGradleWrapperPropertiesCanary():
+  """Returns the data for gradle-wrapper.properties as a string."""
+  # Before May 2020, this wasn't necessary. Might not be necessary at some point
+  # in the future?
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      ('distributionUrl=https\\://services.gradle.org/distributions/'
+       'gradle-6.5-rc-1-all.zip\n'),
+      '',
+  ])
+
+
+def _GenerateGradleProperties():
+  """Returns the data for gradle.properties as a string."""
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      '',
+      '# Tells Gradle to show warnings during project sync.',
+      'org.gradle.warning.mode=all',
+      '',
+  ])
+
+
+def _GenerateBaseVars(generator, build_vars):
+  variables = {}
+  variables['compile_sdk_version'] = (
+      'android-%s' % build_vars['compile_sdk_version'])
+  target_sdk_version = build_vars['android_sdk_version']
+  if target_sdk_version.isalpha():
+    target_sdk_version = '"{}"'.format(target_sdk_version)
+  variables['target_sdk_version'] = target_sdk_version
+  variables['use_gradle_process_resources'] = (
+      generator.use_gradle_process_resources)
+  variables['channel'] = generator.channel
+  return variables
+
+
+def _GenerateGradleFile(entry, generator, build_vars, jinja_processor):
+  """Returns the data for a project's build.gradle."""
+  deps_info = entry.DepsInfo()
+  variables = _GenerateBaseVars(generator, build_vars)
+  sourceSetName = 'main'
+
+  if deps_info['type'] == 'android_apk':
+    target_type = 'android_apk'
+  elif deps_info['type'] in ('java_library', 'java_annotation_processor'):
+    is_prebuilt = deps_info.get('is_prebuilt', False)
+    gradle_treat_as_prebuilt = deps_info.get('gradle_treat_as_prebuilt', False)
+    if is_prebuilt or gradle_treat_as_prebuilt:
+      return None
+    elif deps_info['requires_android']:
+      target_type = 'android_library'
+    else:
+      target_type = 'java_library'
+  elif deps_info['type'] == 'java_binary':
+    target_type = 'java_binary'
+    variables['main_class'] = deps_info.get('main_class')
+  elif deps_info['type'] == 'junit_binary':
+    target_type = 'android_junit'
+    sourceSetName = 'test'
+  else:
+    return None
+
+  variables['target_name'] = os.path.splitext(deps_info['name'])[0]
+  variables['template_type'] = target_type
+  variables['main'] = {}
+  variables[sourceSetName] = generator.Generate(entry)
+  variables['main']['android_manifest'] = generator.GenerateManifest(entry)
+
+  if entry.android_test_entries:
+    variables['android_test'] = []
+    for e in entry.android_test_entries:
+      test_entry = generator.Generate(e)
+      test_entry['android_manifest'] = generator.GenerateManifest(e)
+      variables['android_test'].append(test_entry)
+      for key, value in test_entry.iteritems():
+        if isinstance(value, list):
+          test_entry[key] = sorted(set(value) - set(variables['main'][key]))
+
+  return jinja_processor.Render(
+      _TemplatePath(target_type.split('_')[0]), variables)
+
+
+# Example: //chrome/android:monochrome
+def _GetNative(relative_func, target_names):
+  """Returns an object containing native c++ sources list and its included path
+
+  Iterate through all target_names and their deps to get the list of included
+  paths and sources."""
+  out_dir = constants.GetOutDirectory()
+  with open(os.path.join(out_dir, 'project.json'), 'r') as project_file:
+    projects = json.load(project_file)
+  project_targets = projects['targets']
+  root_dir = projects['build_settings']['root_path']
+  includes = set()
+  processed_target = set()
+  targets_stack = list(target_names)
+  sources = []
+
+  while targets_stack:
+    target_name = targets_stack.pop()
+    if target_name in processed_target:
+      continue
+    processed_target.add(target_name)
+    target = project_targets[target_name]
+    includes.update(target.get('include_dirs', []))
+    targets_stack.extend(target.get('deps', []))
+    # Ignore generated files
+    sources.extend(f for f in target.get('sources', [])
+                   if f.endswith('.cc') and not f.startswith('//out'))
+
+  def process_paths(paths):
+    # Ignores leading //
+    return relative_func(
+        sorted(os.path.join(root_dir, path[2:]) for path in paths))
+
+  return {
+      'sources': process_paths(sources),
+      'includes': process_paths(includes),
+  }
+
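+# The returned dict is shaped like {'sources': [...], 'includes': [...]}, with
+# all paths already passed through relative_func.
+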
+
+def _GenerateModuleAll(gradle_output_dir, generator, build_vars,
+                       jinja_processor, native_targets):
+  """Returns the data for a pseudo build.gradle of all dirs.
+
+  See //docs/android_studio.md for more details."""
+  variables = _GenerateBaseVars(generator, build_vars)
+  target_type = 'android_apk'
+  variables['target_name'] = _MODULE_ALL
+  variables['template_type'] = target_type
+  java_dirs = sorted(generator.processed_java_dirs)
+  prebuilts = sorted(generator.processed_prebuilts)
+  res_dirs = sorted(generator.processed_res_dirs)
+  def Relativize(paths):
+    return _RebasePath(paths, os.path.join(gradle_output_dir, _MODULE_ALL))
+
+  # After clank modularization, the java and javatests code will live side by
+  # side in the same module, so we list both of them in the main target here.
+  main_java_dirs = [d for d in java_dirs if 'junit/' not in d]
+  junit_test_java_dirs = [d for d in java_dirs if 'junit/' in d]
+  variables['main'] = {
+      'android_manifest': Relativize(_DEFAULT_ANDROID_MANIFEST_PATH),
+      'java_dirs': Relativize(main_java_dirs),
+      'prebuilts': Relativize(prebuilts),
+      'java_excludes': ['**/*.java'],
+      'res_dirs': Relativize(res_dirs),
+  }
+  variables['android_test'] = [{
+      'java_dirs': Relativize(junit_test_java_dirs),
+      'java_excludes': ['**/*.java'],
+  }]
+  if native_targets:
+    variables['native'] = _GetNative(
+        relative_func=Relativize, target_names=native_targets)
+  data = jinja_processor.Render(
+      _TemplatePath(target_type.split('_')[0]), variables)
+  _WriteFile(
+      os.path.join(gradle_output_dir, _MODULE_ALL, _GRADLE_BUILD_FILE), data)
+  if native_targets:
+    cmake_data = jinja_processor.Render(_TemplatePath('cmake'), variables)
+    _WriteFile(
+        os.path.join(gradle_output_dir, _MODULE_ALL, _CMAKE_FILE), cmake_data)
+
+
+def _GenerateRootGradle(jinja_processor, channel):
+  """Returns the data for the root project's build.gradle."""
+  return jinja_processor.Render(_TemplatePath('root'), {'channel': channel})
+
+
+def _GenerateSettingsGradle(project_entries):
+  """Returns the data for settings.gradle."""
+  project_name = os.path.basename(os.path.dirname(host_paths.DIR_SOURCE_ROOT))
+  lines = []
+  lines.append('// Generated by //build/android/gradle/generate_gradle.py')
+  lines.append('rootProject.name = "%s"' % project_name)
+  lines.append('rootProject.projectDir = settingsDir')
+  lines.append('')
+  for name, subdir in project_entries:
+    # Example target:
+    # android_webview:android_webview_java__build_config_crbug_908819
+    lines.append('include ":%s"' % name)
+    lines.append('project(":%s").projectDir = new File(settingsDir, "%s")' %
+                 (name, subdir))
+  return '\n'.join(lines)
+
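+# Rendered sample (hypothetical project 'foo.bar' in subdir 'foo/bar'):
+#   include ":foo.bar"
+#   project(":foo.bar").projectDir = new File(settingsDir, "foo/bar")
+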
+
+def _FindAllProjectEntries(main_entries):
+  """Returns the list of all _ProjectEntry instances given the root project."""
+  found = set()
+  to_scan = list(main_entries)
+  while to_scan:
+    cur_entry = to_scan.pop()
+    if cur_entry in found:
+      continue
+    found.add(cur_entry)
+    sub_config_paths = cur_entry.DepsInfo()['deps_configs']
+    to_scan.extend(
+        _ProjectEntry.FromBuildConfigPath(p) for p in sub_config_paths)
+  return list(found)
+
+
+def _CombineTestEntries(entries):
+  """Combines test apks into the androidTest source set of their target.
+
+  - Speeds up android studio
+  - Adds proper dependency between test and apk_under_test
+  - Doesn't work for junit yet due to resulting circular dependencies
+    - e.g. base_junit_tests > base_junit_test_support > base_java
+  """
+  combined_entries = []
+  android_test_entries = collections.defaultdict(list)
+  for entry in entries:
+    target_name = entry.GnTarget()
+    if (target_name.endswith(_INSTRUMENTATION_TARGET_SUFFIX)
+        and 'apk_under_test' in entry.Gradle()):
+      apk_name = entry.Gradle()['apk_under_test']
+      android_test_entries[apk_name].append(entry)
+    else:
+      combined_entries.append(entry)
+  for entry in combined_entries:
+    target_name = entry.DepsInfo()['name']
+    if target_name in android_test_entries:
+      entry.android_test_entries = android_test_entries[target_name]
+      del android_test_entries[target_name]
+  # Add unmatched test entries as individual targets.
+  combined_entries.extend(e for l in android_test_entries.values() for e in l)
+  return combined_entries
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('-v',
+                      '--verbose',
+                      dest='verbose_count',
+                      default=0,
+                      action='count',
+                      help='Verbose level')
+  parser.add_argument('--target',
+                      dest='targets',
+                      action='append',
+                      help='GN target to generate project for. Replaces set of '
+                           'default targets. May be repeated.')
+  parser.add_argument('--extra-target',
+                      dest='extra_targets',
+                      action='append',
+                      help='GN target to generate project for, in addition to '
+                           'the default ones. May be repeated.')
+  parser.add_argument('--project-dir',
+                      help='Root of the output project.',
+                      default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle'))
+  parser.add_argument('--all',
+                      action='store_true',
+                      help='Include all .java files reachable from any '
+                           'apk/test/binary target. On by default unless '
+                           '--split-projects is used (--split-projects can '
+                           'slow down Studio given too many targets).')
+  parser.add_argument('--use-gradle-process-resources',
+                      action='store_true',
+                      help='Have gradle generate R.java rather than ninja')
+  parser.add_argument('--split-projects',
+                      action='store_true',
+                      help='Split projects by their gn deps rather than '
+                           'combining all the dependencies of each target')
+  parser.add_argument('--native-target',
+                      dest='native_targets',
+                      action='append',
+                      help='GN native targets to generate for. May be '
+                           'repeated.')
+  parser.add_argument('--compile-sdk-version',
+                      type=int,
+                      default=0,
+                      help='Override compileSdkVersion for android sdk docs. '
+                           'Useful when sources for android_sdk_version are '
+                           'not available in Android Studio.')
+  parser.add_argument(
+      '--sdk-path',
+      default=os.path.expanduser('~/Android/Sdk'),
+      help='The path to use as the SDK root, overrides the '
+      'default at ~/Android/Sdk.')
+  version_group = parser.add_mutually_exclusive_group()
+  version_group.add_argument('--beta',
+                      action='store_true',
+                      help='Generate a project that is compatible with '
+                           'Android Studio Beta.')
+  version_group.add_argument('--canary',
+                      action='store_true',
+                      help='Generate a project that is compatible with '
+                           'Android Studio Canary.')
+  args = parser.parse_args()
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  constants.CheckOutputDirectory()
+  output_dir = constants.GetOutDirectory()
+  devil_chromium.Initialize(output_directory=output_dir)
+  run_tests_helper.SetLogLevel(args.verbose_count)
+
+  if args.use_gradle_process_resources:
+    assert args.split_projects, (
+        'Gradle resource processing does not work without --split-projects.')
+
+  _gradle_output_dir = os.path.abspath(
+      args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir))
+  logging.warning('Creating project at: %s', _gradle_output_dir)
+
+  # Generate for "all targets" by default when not using --split-projects (too
+  # slow), and when no --target has been explicitly set. "all targets" means all
+  # java targets that are depended on by an apk or java_binary (leaf
+  # java_library targets will not be included).
+  args.all = args.all or (not args.split_projects and not args.targets)
+
+  targets_from_args = set(args.targets or _DEFAULT_TARGETS)
+  if args.extra_targets:
+    targets_from_args.update(args.extra_targets)
+
+  if args.all:
+    if args.native_targets:
+      _RunGnGen(output_dir, ['--ide=json'])
+    elif not os.path.exists(os.path.join(output_dir, 'build.ninja')):
+      _RunGnGen(output_dir)
+    else:
+      # Faster than running "gn gen" in the no-op case.
+      _RunNinja(output_dir, ['build.ninja'])
+    # Query ninja for all __build_config_crbug_908819 targets.
+    targets = _QueryForAllGnTargets(output_dir)
+  else:
+    assert not args.native_targets, 'Native editing requires --all.'
+    targets = [
+        re.sub(r'_test_apk$', _INSTRUMENTATION_TARGET_SUFFIX, t)
+        for t in targets_from_args
+    ]
+    # Necessary after "gn clean"
+    if not os.path.exists(
+        os.path.join(output_dir, gn_helpers.BUILD_VARS_FILENAME)):
+      _RunGnGen(output_dir)
+
+  build_vars = gn_helpers.ReadBuildVars(output_dir)
+  jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR)
+  if args.beta:
+    channel = 'beta'
+  elif args.canary:
+    channel = 'canary'
+  else:
+    channel = 'stable'
+  if args.compile_sdk_version:
+    build_vars['compile_sdk_version'] = args.compile_sdk_version
+  else:
+    build_vars['compile_sdk_version'] = build_vars['android_sdk_version']
+  generator = _ProjectContextGenerator(_gradle_output_dir, build_vars,
+      args.use_gradle_process_resources, jinja_processor, args.split_projects,
+      channel)
+
+  main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets]
+
+  if args.all:
+    # There are many unused libraries, so restrict to those that are actually
+    # used by apks/bundles/binaries/tests or that are explicitly mentioned in
+    # --targets.
+    BASE_TYPES = ('android_apk', 'android_app_bundle_module', 'java_binary',
+                  'junit_binary')
+    main_entries = [
+        e for e in main_entries
+        if (e.GetType() in BASE_TYPES or e.GnTarget() in targets_from_args
+            or e.GnTarget().endswith(_INSTRUMENTATION_TARGET_SUFFIX))
+    ]
+
+  if args.split_projects:
+    main_entries = _FindAllProjectEntries(main_entries)
+
+  logging.info('Generating for %d targets.', len(main_entries))
+
+  entries = [e for e in _CombineTestEntries(main_entries) if e.IsValid()]
+  logging.info('Creating %d projects for targets.', len(entries))
+
+  logging.warning('Writing .gradle files...')
+  project_entries = []
+  # When only one entry will be generated we want it to have a valid
+  # build.gradle file with its own AndroidManifest.
+  for entry in entries:
+    data = _GenerateGradleFile(entry, generator, build_vars, jinja_processor)
+    if data and not args.all:
+      project_entries.append((entry.ProjectName(), entry.GradleSubdir()))
+      _WriteFile(
+          os.path.join(generator.EntryOutputDir(entry), _GRADLE_BUILD_FILE),
+          data)
+  if args.all:
+    project_entries.append((_MODULE_ALL, _MODULE_ALL))
+    _GenerateModuleAll(_gradle_output_dir, generator, build_vars,
+                       jinja_processor, args.native_targets)
+
+  _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE),
+             _GenerateRootGradle(jinja_processor, channel))
+
+  _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'),
+             _GenerateSettingsGradle(project_entries))
+
+  # Ensure the Android Studio sdk is correctly initialized.
+  if not os.path.exists(args.sdk_path):
+    # Help first-time users avoid Android Studio forcibly changing back to
+    # the previous default due to not finding a valid sdk under this dir.
+    shutil.copytree(_RebasePath(build_vars['android_sdk_root']), args.sdk_path)
+  _WriteFile(
+      os.path.join(generator.project_dir, 'local.properties'),
+      _GenerateLocalProperties(args.sdk_path))
+  _WriteFile(os.path.join(generator.project_dir, 'gradle.properties'),
+             _GenerateGradleProperties())
+
+  wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper',
+                                    'gradle-wrapper.properties')
+  if os.path.exists(wrapper_properties):
+    os.unlink(wrapper_properties)
+  if args.canary:
+    _WriteFile(wrapper_properties, _GenerateGradleWrapperPropertiesCanary())
+
+  generated_inputs = set()
+  for entry in entries:
+    entries_to_gen = [entry]
+    entries_to_gen.extend(entry.android_test_entries)
+    for entry_to_gen in entries_to_gen:
+      # Build all paths referenced by .gradle that exist within output_dir.
+      generated_inputs.update(generator.GeneratedInputs(entry_to_gen))
+  if generated_inputs:
+    targets = _RebasePath(generated_inputs, output_dir)
+    _RunNinja(output_dir, targets)
+
+  logging.warning('Generated files will only appear once you\'ve built them.')
+  logging.warning('Generated projects for Android Studio %s', channel)
+  logging.warning('For more tips: https://chromium.googlesource.com/chromium'
+                  '/src.git/+/master/docs/android_studio.md')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gradle/gn_to_cmake.py b/src/build/android/gradle/gn_to_cmake.py
new file mode 100755
index 0000000..d3e80ae
--- /dev/null
+++ b/src/build/android/gradle/gn_to_cmake.py
@@ -0,0 +1,689 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Usage: gn_to_cmake.py <json_file_name>
+
+gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+
+or
+
+gn gen out/config --ide=json
+python gn/gn_to_cmake.py out/config/project.json
+
+The first is recommended, as it will auto-update.
+"""
+
+from __future__ import print_function
+
+import functools
+import json
+import posixpath
+import string
+import sys
+
+
+def CMakeStringEscape(a):
+  """Escapes the string 'a' for use inside a CMake string.
+
+  This means escaping
+  '\' otherwise it may be seen as modifying the next character
+  '"' otherwise it will end the string
+  ';' otherwise the string becomes a list
+
+  The following do not need to be escaped
+  '#' when the lexer is in string state, this does not start a comment
+  """
+  return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
+
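+# For instance, CMakeStringEscape('a;b') returns 'a\\;b', and each '"' in the
+# input becomes '\\"'.
+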
+
+def CMakeTargetEscape(a):
+  """Escapes the string 'a' for use as a CMake target name.
+
+  CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$"
+  The ':' is only allowed for imported targets.
+  """
+  def Escape(c):
+    if c in string.ascii_letters or c in string.digits or c in '_.+-':
+      return c
+    else:
+      return '__'
+  return ''.join([Escape(c) for c in a])
+
+
+def SetVariable(out, variable_name, value):
+  """Sets a CMake variable."""
+  out.write('set("')
+  out.write(CMakeStringEscape(variable_name))
+  out.write('" "')
+  out.write(CMakeStringEscape(value))
+  out.write('")\n')
+
+
+def SetVariableList(out, variable_name, values):
+  """Sets a CMake variable to a list."""
+  if not values:
+    return SetVariable(out, variable_name, "")
+  if len(values) == 1:
+    return SetVariable(out, variable_name, values[0])
+  out.write('list(APPEND "')
+  out.write(CMakeStringEscape(variable_name))
+  out.write('"\n  "')
+  out.write('"\n  "'.join([CMakeStringEscape(value) for value in values]))
+  out.write('")\n')
+
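+# For instance, SetVariableList(out, 'srcs', ['a.cc', 'b.cc']) emits:
+#   list(APPEND "srcs"
+#     "a.cc"
+#     "b.cc")
+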
+
+def SetFilesProperty(output, variable, property_name, values, sep):
+  """Given a set of source files, sets the given property on them."""
+  output.write('set_source_files_properties(')
+  WriteVariable(output, variable)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetCurrentTargetProperty(out, property_name, values, sep=''):
+  """Given a target, sets the given property."""
+  out.write('set_target_properties("${target}" PROPERTIES ')
+  out.write(property_name)
+  out.write(' "')
+  for value in values:
+    out.write(CMakeStringEscape(value))
+    out.write(sep)
+  out.write('")\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+  if prepend:
+    output.write(prepend)
+  output.write('${')
+  output.write(variable_name)
+  output.write('}')
+
+
+# See GetSourceFileType in gn
+source_file_types = {
+  '.cc': 'cxx',
+  '.cpp': 'cxx',
+  '.cxx': 'cxx',
+  '.c': 'c',
+  '.s': 'asm',
+  '.S': 'asm',
+  '.asm': 'asm',
+  '.o': 'obj',
+  '.obj': 'obj',
+}
+
+
+class CMakeTargetType(object):
+  def __init__(self, command, modifier, property_modifier, is_linkable):
+    self.command = command
+    self.modifier = modifier
+    self.property_modifier = property_modifier
+    self.is_linkable = is_linkable
+CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES',
+                                         None, False)
+
+# See GetStringForOutputType in gn
+cmake_target_types = {
+  'unknown': CMakeTargetType.custom,
+  'group': CMakeTargetType.custom,
+  'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True),
+  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True),
+  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True),
+  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False),
+  'source_set': CMakeTargetType('add_library', 'OBJECT', None, False),
+  'copy': CMakeTargetType.custom,
+  'action': CMakeTargetType.custom,
+  'action_foreach': CMakeTargetType.custom,
+  'bundle_data': CMakeTargetType.custom,
+  'create_bundle': CMakeTargetType.custom,
+}
+
+
+def FindFirstOf(s, a):
+  return min(s.find(i) for i in a if i in s)
+
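+# e.g. FindFirstOf('//base/test:test_support', (':', '(')) returns the index
+# of ':'; it raises ValueError when none of the characters occur in s.
+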
+
+def GetCMakeTargetName(gn_target_name):
+  # See <chromium>/src/tools/gn/label.cc#Resolve
+  # //base/test:test_support(//build/toolchain/win:msvc)
+  path_separator = FindFirstOf(gn_target_name, (':', '('))
+  location = None
+  name = None
+  toolchain = None
+  if not path_separator:
+    location = gn_target_name[2:]
+  else:
+    location = gn_target_name[2:path_separator]
+    toolchain_separator = gn_target_name.find('(', path_separator)
+    if toolchain_separator == -1:
+      name = gn_target_name[path_separator + 1:]
+    else:
+      if toolchain_separator > path_separator:
+        name = gn_target_name[path_separator + 1:toolchain_separator]
+      assert gn_target_name.endswith(')')
+      toolchain = gn_target_name[toolchain_separator + 1:-1]
+  assert location or name
+
+  cmake_target_name = None
+  if location.endswith('/' + name):
+    cmake_target_name = location
+  elif location:
+    cmake_target_name = location + '_' + name
+  else:
+    cmake_target_name = name
+  if toolchain:
+    cmake_target_name += '--' + toolchain
+  return CMakeTargetEscape(cmake_target_name)
+
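+# For the label in the comment above, the result after escaping is
+# 'base__test_test_support--build__toolchain__win__msvc'.
+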
+
+class Project(object):
+  def __init__(self, project_json):
+    self.targets = project_json['targets']
+    build_settings = project_json['build_settings']
+    self.root_path = build_settings['root_path']
+    self.build_path = posixpath.join(self.root_path,
+                                     build_settings['build_dir'][2:])
+    self.object_source_deps = {}
+
+  def GetAbsolutePath(self, path):
+    if path.startswith("//"):
+      return self.root_path + "/" + path[2:]
+    else:
+      return path
+
+  def GetObjectSourceDependencies(self, gn_target_name, object_dependencies):
+    """All OBJECT libraries whose sources have not been absorbed."""
+    if gn_target_name in self.object_source_deps:
+      object_dependencies.update(self.object_source_deps[gn_target_name])
+      return
+    target_deps = set()
+    dependencies = self.targets[gn_target_name].get('deps', [])
+    for dependency in dependencies:
+      dependency_type = self.targets[dependency].get('type', None)
+      if dependency_type == 'source_set':
+        target_deps.add(dependency)
+      if dependency_type not in gn_target_types_that_absorb_objects:
+        self.GetObjectSourceDependencies(dependency, target_deps)
+    self.object_source_deps[gn_target_name] = target_deps
+    object_dependencies.update(target_deps)
+
+  def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies):
+    """All OBJECT libraries whose libraries have not been absorbed."""
+    dependencies = self.targets[gn_target_name].get('deps', [])
+    for dependency in dependencies:
+      dependency_type = self.targets[dependency].get('type', None)
+      if dependency_type == 'source_set':
+        object_dependencies.add(dependency)
+        self.GetObjectLibraryDependencies(dependency, object_dependencies)
+
+
+class Target(object):
+  def __init__(self, gn_target_name, project):
+    self.gn_name = gn_target_name
+    self.properties = project.targets[self.gn_name]
+    self.cmake_name = GetCMakeTargetName(self.gn_name)
+    self.gn_type = self.properties.get('type', None)
+    self.cmake_type = cmake_target_types.get(self.gn_type, None)
+
+
+def WriteAction(out, target, project, sources, synthetic_dependencies):
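+  # Sketch of the generated CMake (illustrative; exact quoting and variable
+  # names are handled by the helpers used below):
+  #   set(<target>__output "<abs output paths>")
+  #   add_custom_command(OUTPUT ${<target>__output}
+  #     COMMAND ${CMAKE_COMMAND} -E make_directory "<output dirs>"
+  #     COMMAND python "<script>" "<args>"
+  #     DEPENDS <source list variables>
+  #     WORKING_DIRECTORY "<build dir>"
+  #     COMMENT "Action: <target>"
+  #     VERBATIM)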
+  outputs = []
+  output_directories = set()
+  for output in target.properties.get('outputs', []):
+    output_abs_path = project.GetAbsolutePath(output)
+    outputs.append(output_abs_path)
+    output_directory = posixpath.dirname(output_abs_path)
+    if output_directory:
+      output_directories.add(output_directory)
+  outputs_name = '${target}__output'
+  SetVariableList(out, outputs_name, outputs)
+
+  out.write('add_custom_command(OUTPUT ')
+  WriteVariable(out, outputs_name)
+  out.write('\n')
+
+  if output_directories:
+    out.write('  COMMAND ${CMAKE_COMMAND} -E make_directory "')
+    out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+    out.write('"\n')
+
+  script = target.properties['script']
+  arguments = target.properties['args']
+  out.write('  COMMAND python "')
+  out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+  out.write('"')
+  if arguments:
+    out.write('\n    "')
+    out.write('"\n    "'.join([CMakeStringEscape(a) for a in arguments]))
+    out.write('"')
+  out.write('\n')
+
+  out.write('  DEPENDS ')
+  for sources_type_name in sources.values():
+    WriteVariable(out, sources_type_name, ' ')
+  out.write('\n')
+
+  #TODO: CMake 3.7 is introducing DEPFILE
+
+  out.write('  WORKING_DIRECTORY "')
+  out.write(CMakeStringEscape(project.build_path))
+  out.write('"\n')
+
+  out.write('  COMMENT "Action: ${target}"\n')
+
+  out.write('  VERBATIM)\n')
+
+  synthetic_dependencies.add(outputs_name)
+
+
+def ExpandPlaceholders(source, a):
+  source_dir, source_file_part = posixpath.split(source)
+  source_name_part, _ = posixpath.splitext(source_file_part)
+  #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}}
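+  # Example (added for illustration): with source '/root/gpu/shader.glsl',
+  #   {{source}}           -> '/root/gpu/shader.glsl'
+  #   {{source_file_part}} -> 'shader.glsl'
+  #   {{source_name_part}} -> 'shader'
+  #   {{source_dir}}       -> '/root/gpu'
+  # Note that {{source_root_relative_dir}} is approximated by the source dir.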
+  return a.replace('{{source}}', source) \
+          .replace('{{source_file_part}}', source_file_part) \
+          .replace('{{source_name_part}}', source_name_part) \
+          .replace('{{source_dir}}', source_dir) \
+          .replace('{{source_root_relative_dir}}', source_dir)
+
+
+def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
+  all_outputs = target.properties.get('outputs', [])
+  inputs = target.properties.get('sources', [])
+  # TODO: consider expanding 'output_patterns' instead.
+  outputs_per_input = len(all_outputs) // len(inputs)
+  for count, source in enumerate(inputs):
+    source_abs_path = project.GetAbsolutePath(source)
+
+    outputs = []
+    output_directories = set()
+    for output in all_outputs[outputs_per_input *  count:
+                              outputs_per_input * (count+1)]:
+      output_abs_path = project.GetAbsolutePath(output)
+      outputs.append(output_abs_path)
+      output_directory = posixpath.dirname(output_abs_path)
+      if output_directory:
+        output_directories.add(output_directory)
+    outputs_name = '${target}__output_' + str(count)
+    SetVariableList(out, outputs_name, outputs)
+
+    out.write('add_custom_command(OUTPUT ')
+    WriteVariable(out, outputs_name)
+    out.write('\n')
+
+    if output_directories:
+      out.write('  COMMAND ${CMAKE_COMMAND} -E make_directory "')
+      out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+      out.write('"\n')
+
+    script = target.properties['script']
+    # TODO: need to expand {{xxx}} in arguments
+    arguments = target.properties['args']
+    out.write('  COMMAND python "')
+    out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+    out.write('"')
+    if arguments:
+      out.write('\n    "')
+      expand = functools.partial(ExpandPlaceholders, source_abs_path)
+      out.write('"\n    "'.join(
+          [CMakeStringEscape(expand(a)) for a in arguments]))
+      out.write('"')
+    out.write('\n')
+
+    out.write('  DEPENDS')
+    if 'input' in sources:
+      WriteVariable(out, sources['input'], ' ')
+    out.write(' "')
+    out.write(CMakeStringEscape(source_abs_path))
+    out.write('"\n')
+
+    #TODO: CMake 3.7 is introducing DEPFILE
+
+    out.write('  WORKING_DIRECTORY "')
+    out.write(CMakeStringEscape(project.build_path))
+    out.write('"\n')
+
+    out.write('  COMMENT "Action ${target} on ')
+    out.write(CMakeStringEscape(source_abs_path))
+    out.write('"\n')
+
+    out.write('  VERBATIM)\n')
+
+    synthetic_dependencies.add(outputs_name)
+
+
+def WriteCopy(out, target, project, sources, synthetic_dependencies):
+  inputs = target.properties.get('sources', [])
+  raw_outputs = target.properties.get('outputs', [])
+
+  # TODO: consider expanding 'output_patterns' instead.
+  outputs = []
+  for output in raw_outputs:
+    output_abs_path = project.GetAbsolutePath(output)
+    outputs.append(output_abs_path)
+  outputs_name = '${target}__output'
+  SetVariableList(out, outputs_name, outputs)
+
+  out.write('add_custom_command(OUTPUT ')
+  WriteVariable(out, outputs_name)
+  out.write('\n')
+
+  for src, dst in zip(inputs, outputs):
+    out.write('  COMMAND ${CMAKE_COMMAND} -E copy "')
+    out.write(CMakeStringEscape(project.GetAbsolutePath(src)))
+    out.write('" "')
+    out.write(CMakeStringEscape(dst))
+    out.write('"\n')
+
+  out.write('  DEPENDS ')
+  for sources_type_name in sources.values():
+    WriteVariable(out, sources_type_name, ' ')
+  out.write('\n')
+
+  out.write('  WORKING_DIRECTORY "')
+  out.write(CMakeStringEscape(project.build_path))
+  out.write('"\n')
+
+  out.write('  COMMENT "Copy ${target}"\n')
+
+  out.write('  VERBATIM)\n')
+
+  synthetic_dependencies.add(outputs_name)
+
+
+def WriteCompilerFlags(out, target, project, sources):
+  # Hack: set the linker language to C if no C or C++ files are present.
+  if 'c' not in sources and 'cxx' not in sources:
+    SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C'])
+
+  # Mark non-compiled sources as header-only so CMake does not compile them.
+  if 'input' in sources:
+    SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '')
+  if 'other' in sources:
+    SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '')
+
+  # Mark object sources as linkable.
+  if 'obj' in sources:
+    SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '')
+
+  # TODO: 'output_name', 'output_dir', 'output_extension'
+  # This includes using 'source_outputs' to direct compiler output.
+
+  # Includes
+  includes = target.properties.get('include_dirs', [])
+  if includes:
+    out.write('set_property(TARGET "${target}" ')
+    out.write('APPEND PROPERTY INCLUDE_DIRECTORIES')
+    for include_dir in includes:
+      out.write('\n  "')
+      out.write(project.GetAbsolutePath(include_dir))
+      out.write('"')
+    out.write(')\n')
+
+  # Defines
+  defines = target.properties.get('defines', [])
+  if defines:
+    SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';')
+
+  # Compile flags
+  # "arflags", "asmflags", "cflags",
+  # "cflags_c", "clfags_cc", "cflags_objc", "clfags_objcc"
+  # CMake does not have per target lang compile flags.
+  # TODO: $<$<COMPILE_LANGUAGE:CXX>:cflags_cc style generator expression.
+  #       http://public.kitware.com/Bug/view.php?id=14857
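+  # A per-language alternative on newer CMake (illustrative sketch, untested
+  # here): target_compile_options("${target}" PRIVATE
+  #             "$<$<COMPILE_LANGUAGE:CXX>:<cflags_cc...>>")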
+  flags = []
+  flags.extend(target.properties.get('cflags', []))
+  cflags_asm = target.properties.get('asmflags', [])
+  cflags_c = target.properties.get('cflags_c', [])
+  cflags_cxx = target.properties.get('cflags_cc', [])
+  if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')):
+    flags.extend(cflags_c)
+  elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')):
+    flags.extend(cflags_cxx)
+  else:
+    # TODO: This is broken, one cannot generally set properties on files,
+    # as other targets may require different properties on the same files.
+    if 'asm' in sources and cflags_asm:
+      SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ')
+    if 'c' in sources and cflags_c:
+      SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ')
+    if 'cxx' in sources and cflags_cxx:
+      SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ')
+  if flags:
+    SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ')
+
+  # Linker flags
+  ldflags = target.properties.get('ldflags', [])
+  if ldflags:
+    SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ')
+
+
+gn_target_types_that_absorb_objects = (
+  'executable',
+  'loadable_module',
+  'shared_library',
+  'static_library'
+)
+
+
+def WriteSourceVariables(out, target, project):
+  # gn separates the sheep from the goats based on file extensions.
+  # A full separation is done here because of flag handling (see Compile flags).
+  source_types = {'cxx':[], 'c':[], 'asm':[],
+                  'obj':[], 'obj_target':[], 'input':[], 'other':[]}
+
+  # TODO .def files on Windows
+  for source in target.properties.get('sources', []):
+    _, ext = posixpath.splitext(source)
+    source_abs_path = project.GetAbsolutePath(source)
+    source_types[source_file_types.get(ext, 'other')].append(source_abs_path)
+
+  for input_path in target.properties.get('inputs', []):
+    input_abs_path = project.GetAbsolutePath(input_path)
+    source_types['input'].append(input_abs_path)
+
+  # OBJECT library dependencies need to be listed as sources.
+  # Only executables and non-OBJECT libraries may reference an OBJECT library.
+  # https://gitlab.kitware.com/cmake/cmake/issues/14778
+  if target.gn_type in gn_target_types_that_absorb_objects:
+    object_dependencies = set()
+    project.GetObjectSourceDependencies(target.gn_name, object_dependencies)
+    for dependency in object_dependencies:
+      cmake_dependency_name = GetCMakeTargetName(dependency)
+      obj_target_sources = '$<TARGET_OBJECTS:' + cmake_dependency_name + '>'
+      source_types['obj_target'].append(obj_target_sources)
+
+  sources = {}
+  for source_type, sources_of_type in source_types.items():
+    if sources_of_type:
+      sources[source_type] = '${target}__' + source_type + '_srcs'
+      SetVariableList(out, sources[source_type], sources_of_type)
+  return sources
+
+
+def WriteTarget(out, target, project):
+  out.write('\n#')
+  out.write(target.gn_name)
+  out.write('\n')
+
+  if target.cmake_type is None:
+    print('Target {} has unknown target type {}, skipping.'.format(
+        target.gn_name, target.gn_type))
+    return
+
+  SetVariable(out, 'target', target.cmake_name)
+
+  sources = WriteSourceVariables(out, target, project)
+
+  synthetic_dependencies = set()
+  if target.gn_type == 'action':
+    WriteAction(out, target, project, sources, synthetic_dependencies)
+  if target.gn_type == 'action_foreach':
+    WriteActionForEach(out, target, project, sources, synthetic_dependencies)
+  if target.gn_type == 'copy':
+    WriteCopy(out, target, project, sources, synthetic_dependencies)
+
+  out.write(target.cmake_type.command)
+  out.write('("${target}"')
+  if target.cmake_type.modifier is not None:
+    out.write(' ')
+    out.write(target.cmake_type.modifier)
+  for sources_type_name in sources.values():
+    WriteVariable(out, sources_type_name, ' ')
+  if synthetic_dependencies:
+    out.write(' DEPENDS')
+    for synthetic_dependency in synthetic_dependencies:
+      WriteVariable(out, synthetic_dependency, ' ')
+  out.write(')\n')
+
+  if target.cmake_type.command != 'add_custom_target':
+    WriteCompilerFlags(out, target, project, sources)
+
+  libraries = set()
+  nonlibraries = set()
+
+  dependencies = set(target.properties.get('deps', []))
+  # Transitive OBJECT libraries are in sources.
+  # Those sources are dependent on the OBJECT library dependencies.
+  # Those sources cannot bring in library dependencies.
+  object_dependencies = set()
+  if target.gn_type != 'source_set':
+    project.GetObjectLibraryDependencies(target.gn_name, object_dependencies)
+  for object_dependency in object_dependencies:
+    dependencies.update(project.targets.get(object_dependency).get('deps', []))
+
+  for dependency in dependencies:
+    gn_dependency_type = project.targets.get(dependency, {}).get('type', None)
+    cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None)
+    cmake_dependency_name = GetCMakeTargetName(dependency)
+    if cmake_dependency_type.command != 'add_library':
+      nonlibraries.add(cmake_dependency_name)
+    elif cmake_dependency_type.modifier != 'OBJECT':
+      if target.cmake_type.is_linkable:
+        libraries.add(cmake_dependency_name)
+      else:
+        nonlibraries.add(cmake_dependency_name)
+
+  # Non-library dependencies.
+  if nonlibraries:
+    out.write('add_dependencies("${target}"')
+    for nonlibrary in nonlibraries:
+      out.write('\n  "')
+      out.write(nonlibrary)
+      out.write('"')
+    out.write(')\n')
+
+  # Non-OBJECT library dependencies.
+  external_libraries = target.properties.get('libs', [])
+  if target.cmake_type.is_linkable and (external_libraries or libraries):
+    library_dirs = target.properties.get('lib_dirs', [])
+    if library_dirs:
+      SetVariableList(out, '${target}__library_directories', library_dirs)
+
+    system_libraries = []
+    for external_library in external_libraries:
+      if '/' in external_library:
+        libraries.add(project.GetAbsolutePath(external_library))
+      else:
+        if external_library.endswith('.framework'):
+          external_library = external_library[:-len('.framework')]
+        system_library = 'library__' + external_library
+        if library_dirs:
+          system_library = system_library + '__for_${target}'
+        out.write('find_library("')
+        out.write(CMakeStringEscape(system_library))
+        out.write('" "')
+        out.write(CMakeStringEscape(external_library))
+        out.write('"')
+        if library_dirs:
+          out.write(' PATHS "')
+          WriteVariable(out, '${target}__library_directories')
+          out.write('"')
+        out.write(')\n')
+        system_libraries.append(system_library)
+    out.write('target_link_libraries("${target}"')
+    for library in libraries:
+      out.write('\n  "')
+      out.write(CMakeStringEscape(library))
+      out.write('"')
+    for system_library in system_libraries:
+      WriteVariable(out, system_library, '\n  "')
+      out.write('"')
+    out.write(')\n')
+
+
+def WriteProject(project):
+  out = open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+')
+  out.write('# Generated by gn_to_cmake.py.\n')
+  out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+  out.write('cmake_policy(VERSION 2.8.8)\n\n')
+
+  # Update the gn generated ninja build.
+  # If a build file has changed, this will update CMakeLists.ext, provided
+  # this config was created with a command of the form:
+  #   gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+  out.write('execute_process(COMMAND ninja -C "')
+  out.write(CMakeStringEscape(project.build_path))
+  out.write('" build.ninja)\n')
+
+  out.write('include(CMakeLists.ext)\n')
+  out.close()
+
+  out = open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+')
+  out.write('# Generated by gn_to_cmake.py.\n')
+  out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+  out.write('cmake_policy(VERSION 2.8.8)\n')
+
+  # The following appears to be as-yet undocumented.
+  # http://public.kitware.com/Bug/view.php?id=8392
+  out.write('enable_language(ASM)\n\n')
+  # ASM-ATT does not support .S files.
+  # output.write('enable_language(ASM-ATT)\n')
+
+  # Current issues with automatic re-generation:
+  # The gn generated build.ninja target uses build.ninja.d,
+  #   but build.ninja.d does not list the ide script or gn itself.
+  # Currently the ide script is not re-run if the project.json file is
+  #   unchanged, but it does need to re-run when the script itself changes.
+  #   This can be worked around by deleting the project.json file.
+  out.write('file(READ "')
+  gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d')
+  out.write(CMakeStringEscape(gn_deps_file))
+  out.write('" "gn_deps_string" OFFSET ')
+  out.write(str(len('build.ninja: ')))
+  out.write(')\n')
+  # One would think this would need to worry about escaped spaces
+  # but gn doesn't escape spaces here (it generates invalid .d files).
+  out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n')
+  out.write('foreach("gn_dep" ${gn_deps})\n')
+  out.write('  configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n')
+  out.write('endforeach("gn_dep")\n')
+
+  for target_name in project.targets.keys():
+    out.write('\n')
+    WriteTarget(out, Target(target_name, project), project)
+
+
+def main():
+  if len(sys.argv) != 2:
+    print('Usage: ' + sys.argv[0] + ' <json_file_name>')
+    exit(1)
+
+  json_path = sys.argv[1]
+  project = None
+  with open(json_path, 'r') as json_file:
+    project = json.loads(json_file.read())
+
+  WriteProject(Project(project))
+
+
+if __name__ == "__main__":
+  main()
diff --git a/src/build/android/gradle/java.jinja b/src/build/android/gradle/java.jinja
new file mode 100644
index 0000000..7626f61
--- /dev/null
+++ b/src/build/android/gradle/java.jinja
@@ -0,0 +1,41 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+apply plugin: "java"
+{% if template_type == 'java_binary' %}
+apply plugin: "application"
+{% endif %}
+
+sourceSets {
+    main {
+        java.srcDirs = [
+{% for path in main.java_dirs %}
+            "{{ path }}",
+{% endfor %}
+        ]
+{% if main.java_excludes is defined %}
+        java.filter.exclude([
+{% for path in main.java_excludes %}
+            "{{ path }}",
+{% endfor %}
+        ])
+{% endif %}
+    }
+}
+
+sourceCompatibility = JavaVersion.VERSION_1_8
+targetCompatibility = JavaVersion.VERSION_1_8
+
+{% if template_type == 'java_binary' %}
+applicationName = "{{ target_name }}"
+{% if main_class %}
+mainClassName = "{{ main_class }}"
+{% endif %}
+{% endif %}
+{% if template_type in ('java_binary', 'java_library') %}
+archivesBaseName = "{{ target_name }}"
+{% endif %}
+
+{% include 'dependencies.jinja' %}
diff --git a/src/build/android/gradle/manifest.jinja b/src/build/android/gradle/manifest.jinja
new file mode 100644
index 0000000..dea7071
--- /dev/null
+++ b/src/build/android/gradle/manifest.jinja
@@ -0,0 +1,7 @@
+{# Copyright 2017 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="{{ package }}">
+</manifest>
diff --git a/src/build/android/gradle/root.jinja b/src/build/android/gradle/root.jinja
new file mode 100644
index 0000000..15b5e10
--- /dev/null
+++ b/src/build/android/gradle/root.jinja
@@ -0,0 +1,26 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+buildscript {
+    repositories {
+        google()
+        jcenter()
+{% if channel == 'canary' %}
+        // Workaround for http://b/144885480.
+        //maven() {
+        //  url "http://dl.bintray.com/kotlin/kotlin-eap"
+        //}
+{% endif %}
+    }
+    dependencies {
+{% if channel == 'canary' %}
+        classpath "com.android.tools.build:gradle:4.1.0-beta01"
+{% elif channel == 'beta' %}
+        classpath "com.android.tools.build:gradle:4.0.0-rc01"
+{% else %}
+        classpath "com.android.tools.build:gradle:4.0.1"
+{% endif %}
+    }
+}
diff --git a/src/build/android/gtest_apk/BUILD.gn b/src/build/android/gtest_apk/BUILD.gn
new file mode 100644
index 0000000..2a72bc4
--- /dev/null
+++ b/src/build/android/gtest_apk/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+android_library("native_test_instrumentation_test_runner_java") {
+  testonly = true
+  sources = [
+    "java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java",
+    "java/src/org/chromium/build/gtest_apk/NativeTestIntent.java",
+    "java/src/org/chromium/build/gtest_apk/TestStatusIntent.java",
+    "java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java",
+  ]
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java
new file mode 100644
index 0000000..652333b
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java
@@ -0,0 +1,281 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+import android.annotation.SuppressLint;
+import android.app.Activity;
+import android.app.ActivityManager;
+import android.app.Instrumentation;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Bundle;
+import android.os.Environment;
+import android.os.Handler;
+import android.os.Process;
+import android.text.TextUtils;
+import android.util.Log;
+import android.util.SparseArray;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Queue;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * An Instrumentation that runs tests based on NativeTest.
+ */
+public class NativeTestInstrumentationTestRunner extends Instrumentation {
+    private static final String EXTRA_NATIVE_TEST_ACTIVITY =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.NativeTestActivity";
+    private static final String EXTRA_SHARD_NANO_TIMEOUT =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardNanoTimeout";
+    private static final String EXTRA_SHARD_SIZE_LIMIT =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardSizeLimit";
+    private static final String EXTRA_STDOUT_FILE =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.StdoutFile";
+    private static final String EXTRA_TEST_LIST_FILE =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.TestList";
+    private static final String EXTRA_TEST =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.Test";
+
+    private static final String TAG = "NativeTest";
+
+    private static final long DEFAULT_SHARD_NANO_TIMEOUT = 60 * 1000000000L;
+    // Default to no size limit.
+    private static final int DEFAULT_SHARD_SIZE_LIMIT = 0;
+
+    private Handler mHandler = new Handler();
+    private Bundle mLogBundle = new Bundle();
+    private SparseArray<ShardMonitor> mMonitors = new SparseArray<ShardMonitor>();
+    private String mNativeTestActivity;
+    private TestStatusReceiver mReceiver;
+    private Queue<String> mShards = new ArrayDeque<String>();
+    private long mShardNanoTimeout = DEFAULT_SHARD_NANO_TIMEOUT;
+    private int mShardSizeLimit = DEFAULT_SHARD_SIZE_LIMIT;
+    private File mStdoutFile;
+    private Bundle mTransparentArguments;
+
+    @Override
+    public void onCreate(Bundle arguments) {
+        Context context = getContext();
+        mTransparentArguments = new Bundle(arguments);
+
+        mNativeTestActivity = arguments.getString(EXTRA_NATIVE_TEST_ACTIVITY);
+        if (mNativeTestActivity == null) {
+            Log.e(TAG,
+                    "Unable to find org.chromium.native_test.NativeUnitTestActivity extra on "
+                            + "NativeTestInstrumentationTestRunner launch intent.");
+            finish(Activity.RESULT_CANCELED, new Bundle());
+            return;
+        }
+        mTransparentArguments.remove(EXTRA_NATIVE_TEST_ACTIVITY);
+
+        String shardNanoTimeout = arguments.getString(EXTRA_SHARD_NANO_TIMEOUT);
+        if (shardNanoTimeout != null) mShardNanoTimeout = Long.parseLong(shardNanoTimeout);
+        mTransparentArguments.remove(EXTRA_SHARD_NANO_TIMEOUT);
+
+        String shardSizeLimit = arguments.getString(EXTRA_SHARD_SIZE_LIMIT);
+        if (shardSizeLimit != null) mShardSizeLimit = Integer.parseInt(shardSizeLimit);
+        mTransparentArguments.remove(EXTRA_SHARD_SIZE_LIMIT);
+
+        String stdoutFile = arguments.getString(EXTRA_STDOUT_FILE);
+        if (stdoutFile != null) {
+            mStdoutFile = new File(stdoutFile);
+        } else {
+            try {
+                mStdoutFile = File.createTempFile(
+                        ".temp_stdout_", ".txt", Environment.getExternalStorageDirectory());
+                Log.i(TAG, "stdout file created: " + mStdoutFile.getAbsolutePath());
+            } catch (IOException e) {
+                Log.e(TAG, "Unable to create temporary stdout file.", e);
+                finish(Activity.RESULT_CANCELED, new Bundle());
+                return;
+            }
+        }
+
+        mTransparentArguments.remove(EXTRA_STDOUT_FILE);
+
+        String singleTest = arguments.getString(EXTRA_TEST);
+        if (singleTest != null) {
+            mShards.add(singleTest);
+        }
+
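+        // Illustrative note (added): with mShardSizeLimit == 2 and a test list
+        // containing FooTest.a, FooTest.b and FooTest.c, the loop below yields
+        // the gtest-filter shards "FooTest.a:FooTest.b" and "FooTest.c". With
+        // the default limit of 0, all tests end up in a single shard.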
+        String testListFilePath = arguments.getString(EXTRA_TEST_LIST_FILE);
+        if (testListFilePath != null) {
+            File testListFile = new File(testListFilePath);
+            try {
+                BufferedReader testListFileReader =
+                        new BufferedReader(new FileReader(testListFile));
+
+                String test;
+                ArrayList<String> workingShard = new ArrayList<String>();
+                while ((test = testListFileReader.readLine()) != null) {
+                    workingShard.add(test);
+                    if (workingShard.size() == mShardSizeLimit) {
+                        mShards.add(TextUtils.join(":", workingShard));
+                        workingShard = new ArrayList<String>();
+                    }
+                }
+
+                if (!workingShard.isEmpty()) {
+                    mShards.add(TextUtils.join(":", workingShard));
+                }
+
+                testListFileReader.close();
+            } catch (IOException e) {
+                Log.e(TAG, "Error reading " + testListFile.getAbsolutePath(), e);
+            }
+        }
+        mTransparentArguments.remove(EXTRA_TEST_LIST_FILE);
+
+        start();
+    }
+
+    @Override
+    @SuppressLint("DefaultLocale")
+    public void onStart() {
+        super.onStart();
+
+        mReceiver = new TestStatusReceiver();
+        mReceiver.register(getContext());
+        mReceiver.registerCallback(new TestStatusReceiver.TestRunCallback() {
+            @Override
+            public void testRunStarted(int pid) {
+                if (pid != Process.myPid()) {
+                    ShardMonitor m = new ShardMonitor(pid, System.nanoTime() + mShardNanoTimeout);
+                    mMonitors.put(pid, m);
+                    mHandler.post(m);
+                }
+            }
+
+            @Override
+            public void testRunFinished(int pid) {
+                ShardMonitor m = mMonitors.get(pid);
+                if (m != null) {
+                    m.stopped();
+                    mMonitors.remove(pid);
+                }
+                mHandler.post(new ShardEnder(pid));
+            }
+
+            @Override
+            public void uncaughtException(int pid, String stackTrace) {
+                mLogBundle.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
+                        String.format("Uncaught exception in test process (pid: %d)%n%s%n", pid,
+                                stackTrace));
+                sendStatus(0, mLogBundle);
+            }
+        });
+
+        mHandler.post(new ShardStarter());
+    }
+
+    /** Monitors a test shard's execution. */
+    private class ShardMonitor implements Runnable {
+        private static final int MONITOR_FREQUENCY_MS = 1000;
+
+        private long mExpirationNanoTime;
+        private int mPid;
+        private AtomicBoolean mStopped;
+
+        public ShardMonitor(int pid, long expirationNanoTime) {
+            mPid = pid;
+            mExpirationNanoTime = expirationNanoTime;
+            mStopped = new AtomicBoolean(false);
+        }
+
+        public void stopped() {
+            mStopped.set(true);
+        }
+
+        @Override
+        public void run() {
+            if (mStopped.get()) {
+                return;
+            }
+
+            if (isAppProcessAlive(getContext(), mPid)) {
+                if (System.nanoTime() > mExpirationNanoTime) {
+                    Log.e(TAG, String.format("Test process %d timed out.", mPid));
+                    mHandler.post(new ShardEnder(mPid));
+                    return;
+                } else {
+                    mHandler.postDelayed(this, MONITOR_FREQUENCY_MS);
+                    return;
+                }
+            }
+
+            Log.e(TAG, String.format("Test process %d died unexpectedly.", mPid));
+            mHandler.post(new ShardEnder(mPid));
+        }
+    }
+
+    private static boolean isAppProcessAlive(Context context, int pid) {
+        ActivityManager activityManager =
+                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+        for (ActivityManager.RunningAppProcessInfo processInfo :
+                activityManager.getRunningAppProcesses()) {
+            if (processInfo.pid == pid) return true;
+        }
+        return false;
+    }
+
+    protected Intent createShardMainIntent() {
+        Intent i = new Intent(Intent.ACTION_MAIN);
+        i.setComponent(new ComponentName(getContext().getPackageName(), mNativeTestActivity));
+        i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+        i.putExtras(mTransparentArguments);
+        if (mShards != null && !mShards.isEmpty()) {
+            String gtestFilter = mShards.remove();
+            i.putExtra(NativeTestIntent.EXTRA_GTEST_FILTER, gtestFilter);
+        }
+        i.putExtra(NativeTestIntent.EXTRA_STDOUT_FILE, mStdoutFile.getAbsolutePath());
+        return i;
+    }
+
+    /**
+     * Starts the NativeTest Activity.
+     */
+    private class ShardStarter implements Runnable {
+        @Override
+        public void run() {
+            getContext().startActivity(createShardMainIntent());
+        }
+    }
+
+    private class ShardEnder implements Runnable {
+        private static final int WAIT_FOR_DEATH_MILLIS = 10;
+
+        private int mPid;
+
+        public ShardEnder(int pid) {
+            mPid = pid;
+        }
+
+        @Override
+        public void run() {
+            if (mPid != Process.myPid()) {
+                Process.killProcess(mPid);
+                try {
+                    while (isAppProcessAlive(getContext(), mPid)) {
+                        Thread.sleep(WAIT_FOR_DEATH_MILLIS);
+                    }
+                } catch (InterruptedException e) {
+                    Log.e(TAG, String.format("%d may still be alive.", mPid), e);
+                }
+            }
+            if (mShards != null && !mShards.isEmpty()) {
+                mHandler.post(new ShardStarter());
+            } else {
+                finish(Activity.RESULT_OK, new Bundle());
+            }
+        }
+    }
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java
new file mode 100644
index 0000000..a875e97
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java
@@ -0,0 +1,22 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+/**
+ * Extras for intent sent by NativeTestInstrumentationTestRunner.
+ */
+public class NativeTestIntent {
+    public static final String EXTRA_COMMAND_LINE_FILE =
+            "org.chromium.native_test.NativeTest.CommandLineFile";
+    public static final String EXTRA_COMMAND_LINE_FLAGS =
+            "org.chromium.native_test.NativeTest.CommandLineFlags";
+    public static final String EXTRA_RUN_IN_SUB_THREAD =
+            "org.chromium.native_test.NativeTest.RunInSubThread";
+    public static final String EXTRA_GTEST_FILTER =
+            "org.chromium.native_test.NativeTest.GtestFilter";
+    public static final String EXTRA_STDOUT_FILE = "org.chromium.native_test.NativeTest.StdoutFile";
+    public static final String EXTRA_COVERAGE_DEVICE_FILE =
+            "org.chromium.native_test.NativeTest.CoverageDeviceFile";
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java
new file mode 100644
index 0000000..520b748
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java
@@ -0,0 +1,21 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+/**
+ * Intent action and extras of broadcasts intercepted by TestStatusReceiver.
+ */
+public class TestStatusIntent {
+    public static final String ACTION_TEST_RUN_STARTED =
+            "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_STARTED";
+    public static final String ACTION_TEST_RUN_FINISHED =
+            "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_FINISHED";
+    public static final String ACTION_UNCAUGHT_EXCEPTION =
+            "org.chromium.test.reporter.TestStatusReporter.UNCAUGHT_EXCEPTION";
+    public static final String DATA_TYPE_RESULT = "org.chromium.test.reporter/result";
+    public static final String EXTRA_PID = "org.chromium.test.reporter.TestStatusReporter.PID";
+    public static final String EXTRA_STACK_TRACE =
+            "org.chromium.test.reporter.TestStatusReporter.STACK_TRACE";
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java
new file mode 100644
index 0000000..e539009
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java
@@ -0,0 +1,89 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Receives test status broadcasts sent from
+ * {@link org.chromium.test.reporter.TestStatusReporter}.
+ */
+public class TestStatusReceiver extends BroadcastReceiver {
+    private static final String TAG = "test_reporter";
+
+    private final List<TestRunCallback> mTestRunCallbacks = new ArrayList<TestRunCallback>();
+
+    /** An IntentFilter that matches the intents that this class can receive. */
+    private static final IntentFilter INTENT_FILTER;
+    static {
+        IntentFilter filter = new IntentFilter();
+        filter.addAction(TestStatusIntent.ACTION_TEST_RUN_STARTED);
+        filter.addAction(TestStatusIntent.ACTION_TEST_RUN_FINISHED);
+        filter.addAction(TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION);
+        try {
+            filter.addDataType(TestStatusIntent.DATA_TYPE_RESULT);
+        } catch (IntentFilter.MalformedMimeTypeException e) {
+            Log.wtf(TAG, "Invalid MIME type", e);
+        }
+        INTENT_FILTER = filter;
+    }
+
+    /** A callback used when a test run has started or finished. */
+    public interface TestRunCallback {
+        void testRunStarted(int pid);
+        void testRunFinished(int pid);
+        void uncaughtException(int pid, String stackTrace);
+    }
+
+    /** Register a callback for when a test run has started or finished. */
+    public void registerCallback(TestRunCallback c) {
+        mTestRunCallbacks.add(c);
+    }
+
+    /** Register this receiver using the provided context. */
+    public void register(Context c) {
+        c.registerReceiver(this, INTENT_FILTER);
+    }
+
+    /**
+     * Receive a broadcast intent.
+     *
+     * @param context The Context in which the receiver is running.
+     * @param intent The intent received.
+     */
+    @Override
+    public void onReceive(Context context, Intent intent) {
+        int pid = intent.getIntExtra(TestStatusIntent.EXTRA_PID, 0);
+        String stackTrace = intent.getStringExtra(TestStatusIntent.EXTRA_STACK_TRACE);
+
+        switch (intent.getAction()) {
+            case TestStatusIntent.ACTION_TEST_RUN_STARTED:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.testRunStarted(pid);
+                }
+                break;
+            case TestStatusIntent.ACTION_TEST_RUN_FINISHED:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.testRunFinished(pid);
+                }
+                break;
+            case TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.uncaughtException(pid, stackTrace);
+                }
+                break;
+            default:
+                Log.e(TAG, "Unrecognized intent received: " + intent.toString());
+                break;
+        }
+    }
+}
diff --git a/src/build/android/gyp/aar.py b/src/build/android/gyp/aar.py
new file mode 100755
index 0000000..b157cd8
--- /dev/null
+++ b/src/build/android/gyp/aar.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python3
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Processes an Android AAR file."""
+
+import argparse
+import os
+import posixpath
+import re
+import shutil
+import sys
+from xml.etree import ElementTree
+import zipfile
+
+from util import build_utils
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             os.pardir, os.pardir)))
+import gn_helpers
+
+
+_PROGUARD_TXT = 'proguard.txt'
+
+
+def _GetManifestPackage(doc):
+  """Returns the package specified in the manifest.
+
+  Args:
+    doc: an XML tree parsed by ElementTree
+
+  Returns:
+    String representing the package name.
+  """
+  return doc.attrib['package']
+
+
+def _IsManifestEmpty(doc):
+  """Decides whether the given manifest has merge-worthy elements.
+
+  E.g.: <activity>, <service>, etc.
+
+  Args:
+    doc: an XML tree parsed by ElementTree
+
+  Returns:
+    True if the manifest contains no merge-worthy elements.
+  """
+  for node in doc:
+    if node.tag == 'application':
+      if list(node):
+        return False
+    elif node.tag != 'uses-sdk':
+      return False
+
+  return True
+
+
+def _CreateInfo(aar_file):
+  """Extracts and return .info data from an .aar file.
+
+  Args:
+    aar_file: Path to an input .aar file.
+
+  Returns:
+    A dict containing .info data.
+  """
+  data = {}
+  data['aidl'] = []
+  data['assets'] = []
+  data['resources'] = []
+  data['subjars'] = []
+  data['subjar_tuples'] = []
+  data['has_classes_jar'] = False
+  data['has_proguard_flags'] = False
+  data['has_native_libraries'] = False
+  data['has_r_text_file'] = False
+  with zipfile.ZipFile(aar_file) as z:
+    manifest_xml = ElementTree.fromstring(z.read('AndroidManifest.xml'))
+    data['is_manifest_empty'] = _IsManifestEmpty(manifest_xml)
+    manifest_package = _GetManifestPackage(manifest_xml)
+    if manifest_package:
+      data['manifest_package'] = manifest_package
+
+    for name in z.namelist():
+      if name.endswith('/'):
+        continue
+      if name.startswith('aidl/'):
+        data['aidl'].append(name)
+      elif name.startswith('res/'):
+        data['resources'].append(name)
+      elif name.startswith('libs/') and name.endswith('.jar'):
+        label = posixpath.basename(name)[:-4]
+        label = re.sub(r'[^a-zA-Z0-9._]', '_', label)
+        data['subjars'].append(name)
+        data['subjar_tuples'].append([label, name])
+      elif name.startswith('assets/'):
+        data['assets'].append(name)
+      elif name.startswith('jni/'):
+        data['has_native_libraries'] = True
+        if 'native_libraries' in data:
+          data['native_libraries'].append(name)
+        else:
+          data['native_libraries'] = [name]
+      elif name == 'classes.jar':
+        data['has_classes_jar'] = True
+      elif name == _PROGUARD_TXT:
+        data['has_proguard_flags'] = True
+      elif name == 'R.txt':
+        # Some AARs, e.g. gvr_controller_java, have an empty R.txt. Such AARs
+        # also have no resources. We treat an empty R.txt as having no R.txt.
+        data['has_r_text_file'] = bool(z.read('R.txt').strip())
+
+  return data
+
+
+def _PerformExtract(aar_file, output_dir, name_allowlist):
+  with build_utils.TempDir() as tmp_dir:
+    tmp_dir = os.path.join(tmp_dir, 'staging')
+    os.mkdir(tmp_dir)
+    build_utils.ExtractAll(
+        aar_file, path=tmp_dir, predicate=name_allowlist.__contains__)
+    # Write a breadcrumb so that SuperSize can attribute files back to the .aar.
+    with open(os.path.join(tmp_dir, 'source.info'), 'w') as f:
+      f.write('source={}\n'.format(aar_file))
+
+    shutil.rmtree(output_dir, ignore_errors=True)
+    shutil.move(tmp_dir, output_dir)
+
+
+def _AddCommonArgs(parser):
+  parser.add_argument(
+      'aar_file', help='Path to the AAR file.', type=os.path.normpath)
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  command_parsers = parser.add_subparsers(dest='command')
+  subp = command_parsers.add_parser(
+      'list', help='Output a GN scope describing the contents of the .aar.')
+  _AddCommonArgs(subp)
+  subp.add_argument('--output', help='Output file.', default='-')
+
+  subp = command_parsers.add_parser('extract', help='Extracts the .aar')
+  _AddCommonArgs(subp)
+  subp.add_argument(
+      '--output-dir',
+      help='Output directory for the extracted files.',
+      required=True,
+      type=os.path.normpath)
+  subp.add_argument(
+      '--assert-info-file',
+      help='Path to .info file. Asserts that it matches what '
+      '"list" would output.',
+      type=argparse.FileType('r'))
+  subp.add_argument(
+      '--ignore-resources',
+      action='store_true',
+      help='Whether to skip extraction of res/')
+
+  args = parser.parse_args()
+
+  aar_info = _CreateInfo(args.aar_file)
+  formatted_info = """\
+# Generated by //build/android/gyp/aar.py
+# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
+
+""" + gn_helpers.ToGNString(aar_info, pretty=True)
+
+  if args.command == 'extract':
+    if args.assert_info_file:
+      cached_info = args.assert_info_file.read()
+      if formatted_info != cached_info:
+        raise Exception('android_aar_prebuilt() cached .info file is '
+                        'out-of-date. Run gn gen with '
+                        'update_android_aar_prebuilts=true to update it.')
+
+    with zipfile.ZipFile(args.aar_file) as zf:
+      names = zf.namelist()
+      if args.ignore_resources:
+        names = [n for n in names if not n.startswith('res')]
+
+    _PerformExtract(args.aar_file, args.output_dir, set(names))
+
+  elif args.command == 'list':
+    aar_output_present = args.output != '-' and os.path.isfile(args.output)
+    if aar_output_present:
+      # Some .info files are read-only, for example the cipd-controlled ones
+      # under third_party/android_deps/repository. To deal with these, first
+      # check that the existing content is correct, and if it is, exit without
+      # touching the file system.
+      file_info = open(args.output, 'r').read()
+      if file_info == formatted_info:
+        return
+
+    # Try to write the file. This may fail for read-only ones that were
+    # not updated.
+    try:
+      with open(args.output, 'w') as f:
+        f.write(formatted_info)
+    except IOError as e:
+      if not aar_output_present:
+        raise e
+      raise Exception('Could not update output file: %s\n%s\n' %
+                      (args.output, e))
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/aar.pydeps b/src/build/android/gyp/aar.pydeps
new file mode 100644
index 0000000..7e2924b
--- /dev/null
+++ b/src/build/android/gyp/aar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
+../../gn_helpers.py
+aar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/aidl.py b/src/build/android/gyp/aidl.py
new file mode 100755
index 0000000..b8099aa
--- /dev/null
+++ b/src/build/android/gyp/aidl.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+  option_parser.add_option('--imports', help='Files to import.')
+  option_parser.add_option('--includes',
+                           help='Directories to add as import search paths.')
+  option_parser.add_option('--srcjar', help='Path for srcjar output.')
+  build_utils.AddDepfileOption(option_parser)
+  options, args = option_parser.parse_args(argv[1:])
+
+  options.includes = build_utils.ParseGnList(options.includes)
+
+  with build_utils.TempDir() as temp_dir:
+    for f in args:
+      classname = os.path.splitext(os.path.basename(f))[0]
+      output = os.path.join(temp_dir, classname + '.java')
+      aidl_cmd = [options.aidl_path]
+      aidl_cmd += [
+        '-p' + s for s in build_utils.ParseGnList(options.imports)
+      ]
+      aidl_cmd += ['-I' + s for s in options.includes]
+      aidl_cmd += [
+        f,
+        output
+      ]
+      build_utils.CheckOutput(aidl_cmd)
+
+    with build_utils.AtomicOutput(options.srcjar) as f:
+      with zipfile.ZipFile(f, 'w') as srcjar:
+        for path in build_utils.FindInDirectory(temp_dir, '*.java'):
+          with open(path) as fileobj:
+            data = fileobj.read()
+          pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1)
+          arcname = '%s/%s' % (
+              pkg_name.replace('.', '/'), os.path.basename(path))
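+          # e.g. (illustrative) package 'org.chromium.foo' and file 'IFoo.java'
+          # yield the archive name 'org/chromium/foo/IFoo.java'.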
+          build_utils.AddToZipHermetic(srcjar, arcname, data=data)
+
+  if options.depfile:
+    include_files = []
+    for include_dir in options.includes:
+      include_files += build_utils.FindInDirectory(include_dir, '*.java')
+    build_utils.WriteDepfile(options.depfile, options.srcjar, include_files)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/aidl.pydeps b/src/build/android/gyp/aidl.pydeps
new file mode 100644
index 0000000..11c55ed
--- /dev/null
+++ b/src/build/android/gyp/aidl.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py
+../../gn_helpers.py
+aidl.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/allot_native_libraries.py b/src/build/android/gyp/allot_native_libraries.py
new file mode 100755
index 0000000..978b173
--- /dev/null
+++ b/src/build/android/gyp/allot_native_libraries.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Allots libraries to modules to be packaged into.
+
+All libraries that are depended on by a single module will be allotted to this
+module. All other libraries will be allotted to the closest ancestor.
+
+Example:
+  Given the module dependency structure
+
+        c
+       / \
+      b   d
+     /     \
+    a       e
+
+  and libraries assignment
+
+    a: ['lib1.so']
+    e: ['lib2.so', 'lib1.so']
+
+  will make the allotment decision
+
+    c: ['lib1.so']
+    e: ['lib2.so']
+
+  The above example is invoked via:
+
+    ./allot_native_libraries \
+      --libraries 'a,["1.so"]' \
+      --libraries 'e,["2.so", "1.so"]' \
+      --dep c:b \
+      --dep b:a \
+      --dep c:d \
+      --dep d:e \
+      --output <output JSON>
+"""
+
+import argparse
+import collections
+import json
+import sys
+
+from util import build_utils
+
+
+def _ModuleLibrariesPair(arg):
+  pos = arg.find(',')
+  assert pos > 0
+  return (arg[:pos], arg[pos + 1:])
+
+
+def _DepPair(arg):
+  parent, child = arg.split(':')
+  return (parent, child)
+
+
+def _PathFromRoot(module_tree, module):
+  """Computes path from root to a module.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent.
+    module: Module to which to compute the path.
+
+  Returns:
+    Path from root to the module.
+  """
+  path = [module]
+  while module_tree.get(module):
+    module = module_tree[module]
+    path = [module] + path
+  return path
+
+
+def _ClosestCommonAncestor(module_tree, modules):
+  """Computes the common ancestor of a set of modules.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent.
+    modules: Set of modules for which to find the closest common ancestor.
+
+  Returns:
+    The closest common ancestor.
+  """
+  paths = [_PathFromRoot(module_tree, m) for m in modules]
+  assert len(paths) > 0
+  ancestor = None
+  for level in zip(*paths):
+    if len(set(level)) != 1:
+      return ancestor
+    ancestor = level[0]
+  return ancestor
+
+
+def _AllotLibraries(module_tree, libraries_map):
+  """Allot all libraries to a module.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent. Modules can map
+      to None, which is considered the root of the tree.
+    libraries_map: Dictionary mapping each library to a set of modules, which
+      depend on the library.
+
+  Returns:
+    A dictionary mapping each module name to a set of libraries allotted to
+    the module, such that libraries with multiple dependees are allotted to
+    their closest common ancestor.
+
+  Raises:
+    Exception if some libraries can only be allotted to the None root.
+  """
+  allotment_map = collections.defaultdict(set)
+  for library, modules in libraries_map.items():
+    ancestor = _ClosestCommonAncestor(module_tree, modules)
+    if not ancestor:
+      raise Exception('Cannot allot libraries for given dependency tree')
+    allotment_map[ancestor].add(library)
+  return allotment_map
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--libraries',
+      action='append',
+      type=_ModuleLibrariesPair,
+      required=True,
+      help='A pair of module name and GN list of libraries a module depends '
+      'on. Can be specified multiple times.')
+  parser.add_argument(
+      '--output',
+      required=True,
+      help='A JSON file with a key for each module mapping to a list of '
+      'libraries, which should be packaged into this module.')
+  parser.add_argument(
+      '--dep',
+      action='append',
+      type=_DepPair,
+      dest='deps',
+      default=[],
+      help='A pair of parent module name and child module name '
+      '(format: "<parent>:<child>"). Can be specified multiple times.')
+  options = parser.parse_args(build_utils.ExpandFileArgs(args))
+  options.libraries = [(m, build_utils.ParseGnList(l))
+                       for m, l in options.libraries]
+
+  # Parse input creating libraries and dependency tree.
+  # Maps each library to its dependee modules.
+  libraries_map = collections.defaultdict(set)
+  module_tree = {}  # Maps each module name to its parent.
+  for module, libraries in options.libraries:
+    module_tree[module] = None
+    for library in libraries:
+      libraries_map[library].add(module)
+  for parent, child in options.deps:
+    if module_tree.get(child):
+      raise Exception('%s cannot have multiple parents' % child)
+    module_tree[child] = parent
+    module_tree[parent] = module_tree.get(parent)
+
+  # Allot all libraries to a module such that libraries with multiple dependees
+  # are allotted to the closest ancestor.
+  allotment_map = _AllotLibraries(module_tree, libraries_map)
+
+  # The build system expects there to be a set of libraries even for the modules
+  # that don't have any libraries allotted.
+  for module in module_tree:
+    # Creates missing sets because of defaultdict.
+    allotment_map[module] = allotment_map[module]
+
+  with open(options.output, 'w') as f:
+    # Write native libraries config and ensure the output is deterministic.
+    json.dump({m: sorted(l)
+               for m, l in allotment_map.items()},
+              f,
+              sort_keys=True,
+              indent=2)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/allot_native_libraries.pydeps b/src/build/android/gyp/allot_native_libraries.pydeps
new file mode 100644
index 0000000..d8b10cd
--- /dev/null
+++ b/src/build/android/gyp/allot_native_libraries.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/allot_native_libraries.pydeps build/android/gyp/allot_native_libraries.py
+../../gn_helpers.py
+allot_native_libraries.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/apkbuilder.py b/src/build/android/gyp/apkbuilder.py
new file mode 100755
index 0000000..f1e6563
--- /dev/null
+++ b/src/build/android/gyp/apkbuilder.py
@@ -0,0 +1,560 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+import zlib
+
+import finalize_apk
+
+from util import build_utils
+from util import diff_utils
+from util import zipalign
+
+# Input dex.jar files are zipaligned.
+zipalign.ApplyZipFileZipAlignFix()
+
+
+# Taken from aapt's Package.cpp:
+_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
+                           '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
+                           '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
+                           '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
+                           '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--assets',
+      help='GYP-list of files to add as assets in the form '
+      '"srcPath:zipPath", where ":zipPath" is optional.')
+  parser.add_argument(
+      '--java-resources', help='GYP-list of java_resources JARs to include.')
+  parser.add_argument('--write-asset-list',
+                      action='store_true',
+                      help='Whether to create an assets/assets_list file.')
+  parser.add_argument(
+      '--uncompressed-assets',
+      help='Same as --assets, except disables compression.')
+  parser.add_argument('--resource-apk',
+                      help='An .ap_ file built using aapt',
+                      required=True)
+  parser.add_argument('--output-apk',
+                      help='Path to the output file',
+                      required=True)
+  parser.add_argument('--format', choices=['apk', 'bundle-module'],
+                      default='apk', help='Specify output format.')
+  parser.add_argument('--dex-file',
+                      help='Path to the classes.dex to use')
+  parser.add_argument(
+      '--jdk-libs-dex-file',
+      help='Path to classes.dex created by dex_jdk_libs.py')
+  parser.add_argument('--uncompress-dex', action='store_true',
+                      help='Store .dex files uncompressed in the APK')
+  parser.add_argument('--native-libs',
+                      action='append',
+                      help='GYP-list of native libraries to include. '
+                           'Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--secondary-native-libs',
+                      action='append',
+                      help='GYP-list of native libraries for secondary '
+                           'android-abi. Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--android-abi',
+                      help='Android architecture to use for native libraries')
+  parser.add_argument('--secondary-android-abi',
+                      help='The secondary Android architecture to use for '
+                           'secondary native libraries')
+  parser.add_argument(
+      '--is-multi-abi',
+      action='store_true',
+      help='Will add a placeholder for the missing ABI if no native libs or '
+      'placeholders are set for either the primary or secondary ABI. Can only '
+      'be set if both --android-abi and --secondary-android-abi are set.')
+  parser.add_argument(
+      '--native-lib-placeholders',
+      help='GYP-list of native library placeholders to add.')
+  parser.add_argument(
+      '--secondary-native-lib-placeholders',
+      help='GYP-list of native library placeholders to add '
+      'for the secondary ABI')
+  parser.add_argument('--uncompress-shared-libraries', default='False',
+      choices=['true', 'True', 'false', 'False'],
+      help='Whether to uncompress native shared libraries. Argument must be '
+           'a boolean value.')
+  parser.add_argument(
+      '--apksigner-jar', help='Path to the apksigner executable.')
+  parser.add_argument('--zipalign-path',
+                      help='Path to the zipalign executable.')
+  parser.add_argument('--key-path',
+                      help='Path to keystore for signing.')
+  parser.add_argument('--key-passwd',
+                      help='Keystore password')
+  parser.add_argument('--key-name',
+                      help='Keystore name')
+  parser.add_argument(
+      '--min-sdk-version', required=True, help='Value of APK\'s minSdkVersion')
+  parser.add_argument(
+      '--best-compression',
+      action='store_true',
+      help='Use zip -9 rather than zip -1')
+  parser.add_argument(
+      '--library-always-compress',
+      action='append',
+      help='The list of library files that we always compress.')
+  parser.add_argument(
+      '--library-renames',
+      action='append',
+      help='The list of library files whose names we prefix with "crazy.".')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+  options.assets = build_utils.ParseGnList(options.assets)
+  options.uncompressed_assets = build_utils.ParseGnList(
+      options.uncompressed_assets)
+  options.native_lib_placeholders = build_utils.ParseGnList(
+      options.native_lib_placeholders)
+  options.secondary_native_lib_placeholders = build_utils.ParseGnList(
+      options.secondary_native_lib_placeholders)
+  options.java_resources = build_utils.ParseGnList(options.java_resources)
+  options.native_libs = build_utils.ParseGnList(options.native_libs)
+  options.secondary_native_libs = build_utils.ParseGnList(
+      options.secondary_native_libs)
+  options.library_always_compress = build_utils.ParseGnList(
+      options.library_always_compress)
+  options.library_renames = build_utils.ParseGnList(options.library_renames)
+
+  # --apksigner-jar, --zipalign-path, --key-xxx arguments are
+  # required when building an APK, but not a bundle module.
+  if options.format == 'apk':
+    required_args = [
+        'apksigner_jar', 'zipalign_path', 'key_path', 'key_passwd', 'key_name'
+    ]
+    for required in required_args:
+      if not vars(options)[required]:
+        raise Exception('Argument --%s is required for APKs.' % (
+            required.replace('_', '-')))
+
+  options.uncompress_shared_libraries = (
+      options.uncompress_shared_libraries in ('true', 'True'))
+
+  if not options.android_abi and (options.native_libs or
+                                  options.native_lib_placeholders):
+    raise Exception('Must specify --android-abi with --native-libs')
+  if not options.secondary_android_abi and (options.secondary_native_libs or
+      options.secondary_native_lib_placeholders):
+    raise Exception('Must specify --secondary-android-abi with'
+                    ' --secondary-native-libs')
+  if options.is_multi_abi and not (options.android_abi
+                                   and options.secondary_android_abi):
+    raise Exception('Must specify --is-multi-abi with both --android-abi '
+                    'and --secondary-android-abi.')
+  return options
+
+
+def _SplitAssetPath(path):
+  """Returns (src, dest) given an asset path in the form src[:dest]."""
+  path_parts = path.split(':')
+  src_path = path_parts[0]
+  if len(path_parts) > 1:
+    dest_path = path_parts[1]
+  else:
+    dest_path = os.path.basename(src_path)
+  return src_path, dest_path
+
+
+def _ExpandPaths(paths):
+  """Converts src:dst into tuples and enumerates files within directories.
+
+  Args:
+    paths: Paths in the form "src_path:dest_path"
+
+  Returns:
+    A list of (src_path, dest_path) tuples sorted by dest_path (for stable
+    ordering within output .apk).
+  """
+  ret = []
+  for path in paths:
+    src_path, dest_path = _SplitAssetPath(path)
+    if os.path.isdir(src_path):
+      for f in build_utils.FindInDirectory(src_path, '*'):
+        ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
+    else:
+      ret.append((src_path, dest_path))
+  ret.sort(key=lambda t: t[1])
+  return ret
+
+
+def _GetAssetsToAdd(path_tuples,
+                    fast_align,
+                    disable_compression=False,
+                    allow_reads=True):
+  """Returns the list of file_detail tuples for assets in the apk.
+
+  Args:
+    path_tuples: List of src_path, dest_path tuples to add.
+    fast_align: Whether to perform alignment in python zipfile (alternatively
+                alignment can be done using the zipalign utility out of band).
+    disable_compression: Whether to disable compression.
+    allow_reads: If false, we do not try to read the files from disk (to find
+                 their size for example).
+
+  Returns: A list of (apk_path, src_path, compress, alignment) tuples
+  representing what and how assets are added.
+  """
+  assets_to_add = []
+
+  # Group all uncompressed assets together in the hope that it will increase
+  # locality of mmap'ed files.
+  for target_compress in (False, True):
+    for src_path, dest_path in path_tuples:
+      compress = not disable_compression and (
+          os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
+
+      if target_compress == compress:
+        # AddToZipHermetic() uses this logic to avoid growing small files.
+        # We need it here in order to set alignment correctly.
+        if allow_reads and compress and os.path.getsize(src_path) < 16:
+          compress = False
+
+        apk_path = 'assets/' + dest_path
+        alignment = 0 if compress and not fast_align else 4
+        assets_to_add.append((apk_path, src_path, compress, alignment))
+  return assets_to_add
+
+
+def _AddFiles(apk, details):
+  """Adds files to the apk.
+
+  Args:
+    apk: path to APK to add to.
+    details: A list of file detail tuples (apk_path, src_path, compress,
+    alignment) representing what and how files are added to the APK.
+  """
+  for apk_path, src_path, compress, alignment in details:
+    # This check is only relevant for assets, but it should not matter if it is
+    # checked for the whole list of files.
+    try:
+      apk.getinfo(apk_path)
+      # Should never happen since write_build_config.py handles merging.
+      raise Exception(
+          'Multiple targets specified the asset path: %s' % apk_path)
+    except KeyError:
+      zipalign.AddToZipHermetic(
+          apk,
+          apk_path,
+          src_path=src_path,
+          compress=compress,
+          alignment=alignment)
+
+
+def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align,
+                             lib_always_compress, lib_renames):
+  """Returns the list of file_detail tuples for native libraries in the apk.
+
+  Returns: A list of (apk_path, src_path, compress, alignment) tuples
+  representing what and how native libraries are added.
+  """
+  libraries_to_add = []
+
+  for path in native_libs:
+    basename = os.path.basename(path)
+    compress = not uncompress or any(lib_name in basename
+                                     for lib_name in lib_always_compress)
+    rename = any(lib_name in basename for lib_name in lib_renames)
+    if rename:
+      basename = 'crazy.' + basename
+
+    lib_android_abi = android_abi
+    if path.startswith('android_clang_arm64_hwasan/'):
+      lib_android_abi = 'arm64-v8a-hwasan'
+
+    apk_path = 'lib/%s/%s' % (lib_android_abi, basename)
+    alignment = 0 if compress and not fast_align else 0x1000
+    libraries_to_add.append((apk_path, path, compress, alignment))
+
+  return libraries_to_add
+
+
+def _CreateExpectationsData(native_libs, assets):
+  """Creates list of native libraries and assets."""
+  native_libs = sorted(native_libs)
+  assets = sorted(assets)
+
+  ret = []
+  for apk_path, _, compress, alignment in native_libs + assets:
+    ret.append('apk_path=%s, compress=%s, alignment=%s\n' %
+               (apk_path, compress, alignment))
+  return ''.join(ret)
+
+
+def main(args):
+  build_utils.InitLogging('APKBUILDER_DEBUG')
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  # Before Python 3.7, zipfile offers no way to set the compression level,
+  # so patch zlib's module-level default (normally 6).
+  if options.best_compression:
+    # Compresses about twice as slow as the default.
+    zlib.Z_DEFAULT_COMPRESSION = 9
+  else:
+    # Compresses about twice as fast as the default.
+    zlib.Z_DEFAULT_COMPRESSION = 1
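+  # A sketch of why this works: CPython's zipfile looks up
+  # zlib.Z_DEFAULT_COMPRESSION at call time when creating its compressor,
+  # so reassigning the module attribute changes every subsequent write.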
+
+  # Manually align only when alignment is necessary.
+  # Python's zip implementation duplicates file comments in the central
+  # directory, whereas zipalign does not, so use zipalign for official builds.
+  fast_align = options.format == 'apk' and not options.best_compression
+
+  native_libs = sorted(options.native_libs)
+
+  # Include native libs in the depfile_deps since GN doesn't know about the
+  # dependencies when is_component_build=true.
+  depfile_deps = list(native_libs)
+
+  # For targets that depend on static library APKs, dex paths are created by
+  # the static library's dexsplitter target and GN doesn't know about these
+  # paths.
+  if options.dex_file:
+    depfile_deps.append(options.dex_file)
+
+  secondary_native_libs = []
+  if options.secondary_native_libs:
+    secondary_native_libs = sorted(options.secondary_native_libs)
+    depfile_deps += secondary_native_libs
+
+  if options.java_resources:
+    # Included via .build_config, so need to write it to depfile.
+    depfile_deps.extend(options.java_resources)
+
+  assets = _ExpandPaths(options.assets)
+  uncompressed_assets = _ExpandPaths(options.uncompressed_assets)
+
+  # Included via .build_config, so need to write it to depfile.
+  depfile_deps.extend(x[0] for x in assets)
+  depfile_deps.extend(x[0] for x in uncompressed_assets)
+
+  # Bundle modules have a structure similar to APKs, except that resources
+  # are compiled in protobuf format (instead of binary xml), and that some
+  # files are located into different top-level directories, e.g.:
+  #  AndroidManifest.xml -> manifest/AndroidManifest.xml
+  #  classes.dex -> dex/classes.dex
+  #  res/ -> res/  (unchanged)
+  #  assets/ -> assets/  (unchanged)
+  #  <other-file> -> root/<other-file>
+  #
+  # Hence, the following variables are used to control the location of files in
+  # the final archive.
+  if options.format == 'bundle-module':
+    apk_manifest_dir = 'manifest/'
+    apk_root_dir = 'root/'
+    apk_dex_dir = 'dex/'
+  else:
+    apk_manifest_dir = ''
+    apk_root_dir = ''
+    apk_dex_dir = ''
+
+  def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads):
+    ret = _GetAssetsToAdd(assets,
+                          fast_align,
+                          disable_compression=False,
+                          allow_reads=allow_reads)
+    ret.extend(
+        _GetAssetsToAdd(uncompressed_assets,
+                        fast_align,
+                        disable_compression=True,
+                        allow_reads=allow_reads))
+    return ret
+
+  libs_to_add = _GetNativeLibrariesToAdd(
+      native_libs, options.android_abi, options.uncompress_shared_libraries,
+      fast_align, options.library_always_compress, options.library_renames)
+  if options.secondary_android_abi:
+    libs_to_add.extend(
+        _GetNativeLibrariesToAdd(
+            secondary_native_libs, options.secondary_android_abi,
+            options.uncompress_shared_libraries, fast_align,
+            options.library_always_compress, options.library_renames))
+
+  if options.expected_file:
+    # We compute expectations without reading the files. This lets
+    # expectations be checked for different targets by generating just their
+    # build_configs, without first having to generate all the actual files
+    # and their dependencies (for example by passing
+    # --only-verify-expectations).
+    asset_details = _GetAssetDetails(assets,
+                                     uncompressed_assets,
+                                     fast_align,
+                                     allow_reads=False)
+
+    actual_data = _CreateExpectationsData(libs_to_add, asset_details)
+    diff_utils.CheckExpectations(actual_data, options)
+
+    if options.only_verify_expectations:
+      if options.depfile:
+        build_utils.WriteDepfile(options.depfile,
+                                 options.actual_file,
+                                 inputs=depfile_deps)
+      return
+
+  # Past this point we actually create the final apk, so recompute the asset
+  # details, this time reading the files on disk so that tiny files can be
+  # stored uncompressed.
+  assets_to_add = _GetAssetDetails(
+      assets, uncompressed_assets, fast_align, allow_reads=True)
+
+  # Targets generally do not depend on apks, so no need for only_if_changed.
+  with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
+    with zipfile.ZipFile(options.resource_apk) as resource_apk, \
+         zipfile.ZipFile(f, 'w') as out_apk:
+
+      def add_to_zip(zip_path, data, compress=True, alignment=4):
+        zipalign.AddToZipHermetic(
+            out_apk,
+            zip_path,
+            data=data,
+            compress=compress,
+            alignment=0 if compress and not fast_align else alignment)
+
+      def copy_resource(zipinfo, out_dir=''):
+        add_to_zip(
+            out_dir + zipinfo.filename,
+            resource_apk.read(zipinfo.filename),
+            compress=zipinfo.compress_type != zipfile.ZIP_STORED)
+
+      # Make assets come before resources in order to maintain the same file
+      # ordering as GYP / aapt. http://crbug.com/561862
+      resource_infos = resource_apk.infolist()
+
+      # 1. AndroidManifest.xml
+      logging.debug('Adding AndroidManifest.xml')
+      copy_resource(
+          resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir)
+
+      # 2. Assets
+      logging.debug('Adding assets/')
+      _AddFiles(out_apk, assets_to_add)
+
+      # 3. Dex files
+      logging.debug('Adding classes.dex')
+      if options.dex_file:
+        with open(options.dex_file, 'rb') as dex_file_obj:
+          if options.dex_file.endswith('.dex'):
+            max_dex_number = 1
+            # This is the case for incremental_install=true.
+            add_to_zip(
+                apk_dex_dir + 'classes.dex',
+                dex_file_obj.read(),
+                compress=not options.uncompress_dex)
+          else:
+            max_dex_number = 0
+            with zipfile.ZipFile(dex_file_obj) as dex_zip:
+              for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
+                max_dex_number += 1
+                add_to_zip(
+                    apk_dex_dir + dex,
+                    dex_zip.read(dex),
+                    compress=not options.uncompress_dex)
+
+      if options.jdk_libs_dex_file:
+        with open(options.jdk_libs_dex_file, 'rb') as dex_file_obj:
+          add_to_zip(
+              apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1),
+              dex_file_obj.read(),
+              compress=not options.uncompress_dex)
+
+      # 4. Native libraries.
+      logging.debug('Adding lib/')
+      _AddFiles(out_apk, libs_to_add)
+
+      # Add a placeholder lib if the APK should be multi ABI but is missing libs
+      # for one of the ABIs.
+      native_lib_placeholders = options.native_lib_placeholders
+      secondary_native_lib_placeholders = (
+          options.secondary_native_lib_placeholders)
+      if options.is_multi_abi:
+        if ((secondary_native_libs or secondary_native_lib_placeholders)
+            and not native_libs and not native_lib_placeholders):
+          native_lib_placeholders += ['libplaceholder.so']
+        if ((native_libs or native_lib_placeholders)
+            and not secondary_native_libs
+            and not secondary_native_lib_placeholders):
+          secondary_native_lib_placeholders += ['libplaceholder.so']
+
+      # Add placeholder libs.
+      for name in sorted(native_lib_placeholders):
+        # Note: Empty libs files are ignored by md5check (can cause issues
+        # with stale builds when the only change is adding/removing
+        # placeholders).
+        apk_path = 'lib/%s/%s' % (options.android_abi, name)
+        add_to_zip(apk_path, '', alignment=0x1000)
+
+      for name in sorted(secondary_native_lib_placeholders):
+        # Note: Empty libs files are ignored by md5check (can cause issues
+        # with stale builds when the only change is adding/removing
+        # placeholders).
+        apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
+        add_to_zip(apk_path, '', alignment=0x1000)
+
+      # 5. Resources
+      logging.debug('Adding res/')
+      for info in sorted(resource_infos, key=lambda i: i.filename):
+        if info.filename != 'AndroidManifest.xml':
+          copy_resource(info)
+
+      # 6. Java resources that should be accessible via
+      # Class.getResourceAsStream(), in particular parts of Emma jar.
+      # Prebuilt jars may contain class files which we shouldn't include.
+      logging.debug('Adding Java resources')
+      for java_resource in options.java_resources:
+        with zipfile.ZipFile(java_resource, 'r') as java_resource_jar:
+          for apk_path in sorted(java_resource_jar.namelist()):
+            apk_path_lower = apk_path.lower()
+
+            if apk_path_lower.startswith('meta-inf/'):
+              continue
+            if apk_path_lower.endswith('/'):
+              continue
+            if apk_path_lower.endswith('.class'):
+              continue
+
+            add_to_zip(apk_root_dir + apk_path,
+                       java_resource_jar.read(apk_path))
+
+    if options.format == 'apk':
+      zipalign_path = None if fast_align else options.zipalign_path
+      finalize_apk.FinalizeApk(options.apksigner_jar,
+                               zipalign_path,
+                               f.name,
+                               f.name,
+                               options.key_path,
+                               options.key_passwd,
+                               options.key_name,
+                               int(options.min_sdk_version),
+                               warnings_as_errors=options.warnings_as_errors)
+    logging.debug('Moving file into place')
+
+    if options.depfile:
+      build_utils.WriteDepfile(options.depfile,
+                               options.output_apk,
+                               inputs=depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/apkbuilder.pydeps b/src/build/android/gyp/apkbuilder.pydeps
new file mode 100644
index 0000000..e6122ed
--- /dev/null
+++ b/src/build/android/gyp/apkbuilder.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py
+../../gn_helpers.py
+apkbuilder.py
+finalize_apk.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/zipalign.py
diff --git a/src/build/android/gyp/assert_static_initializers.py b/src/build/android/gyp/assert_static_initializers.py
new file mode 100755
index 0000000..31f2a77
--- /dev/null
+++ b/src/build/android/gyp/assert_static_initializers.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Checks the number of static initializers in an APK's library."""
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT,
+                                              'tools', 'linux',
+                                              'dump-static-initializers.py')
+
+
+def _RunReadelf(so_path, options, tool_prefix=''):
+  return subprocess.check_output([tool_prefix + 'readelf'] + options +
+                                 [so_path]).decode('utf8')
+
+
+def _ParseLibBuildId(so_path, tool_prefix):
+  """Returns the Build ID of the given native library."""
+  stdout = _RunReadelf(so_path, ['-n'], tool_prefix)
+  match = re.search(r'Build ID: (\w+)', stdout)
+  return match.group(1) if match else None
+
+
+def _VerifyLibBuildIdsMatch(tool_prefix, *so_files):
+  if len(set(_ParseLibBuildId(f, tool_prefix) for f in so_files)) > 1:
+    raise Exception('Found differing build ids in output directory and apk. '
+                    'Your output directory is likely stale.')
+
+
+def _GetStaticInitializers(so_path, tool_prefix):
+  output = subprocess.check_output(
+      [_DUMP_STATIC_INITIALIZERS_PATH, '-d', so_path, '-t',
+       tool_prefix]).decode('utf8')
+  summary = re.search(r'Found \d+ static initializers in (\d+) files.', output)
+  return output.splitlines()[:-1], int(summary.group(1))
+
+
+def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix):
+  lib_name = os.path.basename(apk_so_name).replace('crazy.', '')
+  so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
+  if not os.path.exists(so_with_symbols_path):
+    raise Exception('Unstripped .so not found. Looked here: %s' %
+                    so_with_symbols_path)
+  _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path)
+  sis, _ = _GetStaticInitializers(so_with_symbols_path, tool_prefix)
+  for si in sis:
+    print(si)
+
+
+# Mostly copied from //infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+def _ReadInitArray(so_path, tool_prefix, expect_no_initializers):
+  stdout = _RunReadelf(so_path, ['-SW'], tool_prefix)
+  # Matches: .init_array INIT_ARRAY 000000000516add0 5169dd0 000010 00 WA 0 0 8
+  match = re.search(r'\.init_array.*$', stdout, re.MULTILINE)
+  if expect_no_initializers:
+    if match:
+      raise Exception(
+          'Expected no initializers for %s, yet some were found' % so_path)
+    else:
+      return 0
+  elif not match:
+    raise Exception('Did not find section: .init_array in {}:\n{}'.format(
+        so_path, stdout))
+  size_str = re.split(r'\W+', match.group(0))[5]
+  return int(size_str, 16)
+
+
+def _CountStaticInitializers(so_path, tool_prefix, expect_no_initializers):
+  # Count static initializers by measuring the size of the .init_array
+  # section. First, determine whether the library is 32- or 64-bit.
+  stdout = _RunReadelf(so_path, ['-h'], tool_prefix)
+  elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0)
+  elf_class = re.split(r'\W+', elf_class_line)[1]
+  if elf_class == 'ELF32':
+    word_size = 4
+  else:
+    word_size = 8
+
+  # Then find the number of files with global static initializers.
+  # NOTE: this is very implementation-specific and makes assumptions
+  # about how compiler and linker implement global static initializers.
+  init_array_size = _ReadInitArray(so_path, tool_prefix, expect_no_initializers)
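+  # e.g. a 0x10-byte .init_array with word_size == 8 holds two initializer
+  # pointers, i.e. two static initializers.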
+  return init_array_size // word_size
+
+
+def _AnalyzeStaticInitializers(apk_or_aab, tool_prefix, dump_sis, out_dir,
+                               ignored_libs, no_initializers_libs):
+  # Static initializer counting mostly copies logic in
+  # infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+  with zipfile.ZipFile(apk_or_aab) as z:
+    so_files = [
+        f for f in z.infolist() if f.filename.endswith('.so')
+        and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs
+    ]
+    # Skip checking static initializers for secondary abi libs. They will be
+    # checked by 32-bit bots. This avoids the complexity of finding 32 bit .so
+    # files in the output directory in 64 bit builds.
+    has_64 = any('64' in f.filename for f in so_files)
+    files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]
+
+    # Do not check partitioned libs. They have no ".init_array" section since
+    # all SIs are considered "roots" by the linker, and so end up in the base
+    # module.
+    files_to_check = [
+        f for f in files_to_check if not f.filename.endswith('_partition.so')
+    ]
+
+    si_count = 0
+    for f in files_to_check:
+      lib_basename = os.path.basename(f.filename)
+      expect_no_initializers = lib_basename in no_initializers_libs
+      with tempfile.NamedTemporaryFile(prefix=lib_basename) as temp:
+        temp.write(z.read(f))
+        temp.flush()
+        si_count += _CountStaticInitializers(temp.name, tool_prefix,
+                                             expect_no_initializers)
+        if dump_sis:
+          # Print count and list of SIs reported by dump-static-initializers.py.
+          # Doesn't work well on all archs (particularly arm), which is why
+          # the readelf method is used for tracking SI counts.
+          _PrintDumpSIsCount(f.filename, temp.name, out_dir, tool_prefix)
+  return si_count
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--touch', help='File to touch upon success')
+  parser.add_argument('--tool-prefix', required=True,
+                      help='Prefix for nm and friends')
+  parser.add_argument('--expected-count', required=True, type=int,
+                      help='Fail if number of static initializers is not '
+                           'equal to this value.')
+  parser.add_argument('apk_or_aab', help='Path to .apk or .aab file.')
+  args = parser.parse_args()
+
+  # TODO(crbug.com/838414): add support for files included via loadable_modules.
+  ignored_libs = {
+      'libarcore_sdk_c.so', 'libcrashpad_handler_trampoline.so',
+      'libsketchology_native.so'
+  }
+  # The chromium linker has no static initializers, and the regular check
+  # throws when .init_array is missing, so assert that it has none instead.
+  no_initializers_libs = ['libchromium_android_linker.so']
+
+  si_count = _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix,
+                                        False, '.', ignored_libs,
+                                        no_initializers_libs)
+  if si_count != args.expected_count:
+    print('Expected {} static initializers, but found {}.'.format(
+        args.expected_count, si_count))
+    if args.expected_count > si_count:
+      print('You have removed one or more static initializers. Thanks!')
+      print('To fix the build, update the expectation in:')
+      print('    //chrome/android/static_initializers.gni')
+    else:
+      print('Dumping static initializers via dump-static-initializers.py:')
+      sys.stdout.flush()
+      _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, True, '.',
+                                 ignored_libs, no_initializers_libs)
+      print()
+      print('If the above list is not useful, consider listing them with:')
+      print('    //tools/binary_size/diagnose_bloat.py')
+      print()
+      print('For more information:')
+      print('    https://chromium.googlesource.com/chromium/src/+/master/docs/'
+            'static_initializers.md')
+    sys.exit(1)
+
+  if args.touch:
+    with open(args.touch, 'w'):
+      pass
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/assert_static_initializers.pydeps b/src/build/android/gyp/assert_static_initializers.pydeps
new file mode 100644
index 0000000..b574d81
--- /dev/null
+++ b/src/build/android/gyp/assert_static_initializers.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/assert_static_initializers.pydeps build/android/gyp/assert_static_initializers.py
+../../gn_helpers.py
+assert_static_initializers.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/bundletool.py b/src/build/android/gyp/bundletool.py
new file mode 100755
index 0000000..dc9b86a
--- /dev/null
+++ b/src/build/android/gyp/bundletool.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple wrapper around the bundletool tool.
+
+Bundletool is distributed as a versioned jar file. This script abstracts the
+location and version of this jar file, as well as the JVM invocation."""
+
+import logging
+import os
+import sys
+
+from util import build_utils
+
+# Assume this is stored under build/android/gyp/
+BUNDLETOOL_DIR = os.path.abspath(os.path.join(
+    __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
+    'bundletool'))
+
+BUNDLETOOL_VERSION = '1.4.0'
+
+BUNDLETOOL_JAR_PATH = os.path.join(
+    BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
+
+
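+# Sketch of a typical invocation (hypothetical paths):
+#   RunBundleTool(['build-apks', '--bundle=app.aab', '--output=app.apks'])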
+def RunBundleTool(args, warnings_as_errors=(), print_stdout=False):
+  # Use () instead of None because command-line flags are None by default.
+  verify = warnings_as_errors == () or warnings_as_errors
+  # ASAN builds failed with the default of 1GB (crbug.com/1120202).
+  # Bug for bundletool: https://issuetracker.google.com/issues/165911616
+  cmd = build_utils.JavaCmd(verify, xmx='4G')
+  cmd += ['-jar', BUNDLETOOL_JAR_PATH]
+  cmd += args
+  logging.debug(' '.join(cmd))
+  return build_utils.CheckOutput(
+      cmd,
+      print_stdout=print_stdout,
+      print_stderr=True,
+      fail_on_output=False,
+      stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
+
+
+if __name__ == '__main__':
+  RunBundleTool(sys.argv[1:], print_stdout=True)
diff --git a/src/build/android/gyp/bytecode_processor.py b/src/build/android/gyp/bytecode_processor.py
new file mode 100755
index 0000000..d77f159
--- /dev/null
+++ b/src/build/android/gyp/bytecode_processor.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps bin/helper/bytecode_processor and expands @FileArgs."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import server_utils
+
+
+def _AddSwitch(parser, val):
+  parser.add_argument(
+      val, action='store_const', default='--disabled', const=val)
+
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv[1:])
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--target-name', help='Fully qualified GN target name.')
+  parser.add_argument('--script', required=True,
+                      help='Path to the java binary wrapper script.')
+  parser.add_argument('--gn-target', required=True)
+  parser.add_argument('--input-jar', required=True)
+  parser.add_argument('--direct-classpath-jars')
+  parser.add_argument('--sdk-classpath-jars')
+  parser.add_argument('--full-classpath-jars')
+  parser.add_argument('--full-classpath-gn-targets')
+  parser.add_argument('--stamp')
+  parser.add_argument('-v', '--verbose', action='store_true')
+  parser.add_argument('--missing-classes-allowlist')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  _AddSwitch(parser, '--is-prebuilt')
+  args = parser.parse_args(argv)
+
+  if server_utils.MaybeRunCommand(name=args.target_name,
+                                  argv=sys.argv,
+                                  stamp_file=args.stamp):
+    return
+
+  args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
+  args.direct_classpath_jars = build_utils.ParseGnList(
+      args.direct_classpath_jars)
+  args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars)
+  args.full_classpath_gn_targets = build_utils.ParseGnList(
+      args.full_classpath_gn_targets)
+  args.missing_classes_allowlist = build_utils.ParseGnList(
+      args.missing_classes_allowlist)
+
+  verbose = '--verbose' if args.verbose else '--not-verbose'
+
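+  # Each GN list is passed as "<count> <item>..." so the receiving tool can
+  # slice the flat argv back into the original lists.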
+  cmd = [args.script, args.gn_target, args.input_jar, verbose, args.is_prebuilt]
+  cmd += [str(len(args.missing_classes_allowlist))]
+  cmd += args.missing_classes_allowlist
+  cmd += [str(len(args.sdk_classpath_jars))]
+  cmd += args.sdk_classpath_jars
+  cmd += [str(len(args.direct_classpath_jars))]
+  cmd += args.direct_classpath_jars
+  cmd += [str(len(args.full_classpath_jars))]
+  cmd += args.full_classpath_jars
+  cmd += [str(len(args.full_classpath_gn_targets))]
+  cmd += args.full_classpath_gn_targets
+  build_utils.CheckOutput(cmd,
+                          print_stdout=True,
+                          fail_func=None,
+                          fail_on_output=args.warnings_as_errors)
+
+  if args.stamp:
+    build_utils.Touch(args.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/bytecode_processor.pydeps b/src/build/android/gyp/bytecode_processor.pydeps
new file mode 100644
index 0000000..6105d93
--- /dev/null
+++ b/src/build/android/gyp/bytecode_processor.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py
+../../gn_helpers.py
+bytecode_processor.py
+util/__init__.py
+util/build_utils.py
+util/server_utils.py
diff --git a/src/build/android/gyp/bytecode_rewriter.py b/src/build/android/gyp/bytecode_rewriter.py
new file mode 100755
index 0000000..ad232df
--- /dev/null
+++ b/src/build/android/gyp/bytecode_rewriter.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper script around ByteCodeRewriter subclass scripts."""
+
+import argparse
+import sys
+
+from util import build_utils
+
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--script',
+                      required=True,
+                      help='Path to the java binary wrapper script.')
+  parser.add_argument('--classpath', action='append', nargs='+')
+  parser.add_argument('--input-jar', required=True)
+  parser.add_argument('--output-jar', required=True)
+  args = parser.parse_args(argv)
+
+  classpath = build_utils.ParseGnList(args.classpath)
+  build_utils.WriteDepfile(args.depfile, args.output_jar, inputs=classpath)
+
+  classpath.append(args.input_jar)
+  cmd = [
+      args.script, '--classpath', ':'.join(classpath), args.input_jar,
+      args.output_jar
+  ]
+  build_utils.CheckOutput(cmd, print_stdout=True)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/bytecode_rewriter.pydeps b/src/build/android/gyp/bytecode_rewriter.pydeps
new file mode 100644
index 0000000..b8f304a
--- /dev/null
+++ b/src/build/android/gyp/bytecode_rewriter.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_rewriter.pydeps build/android/gyp/bytecode_rewriter.py
+../../gn_helpers.py
+bytecode_rewriter.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/check_flag_expectations.py b/src/build/android/gyp/check_flag_expectations.py
new file mode 100755
index 0000000..22da211
--- /dev/null
+++ b/src/build/android/gyp/check_flag_expectations.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+
+from util import build_utils
+from util import diff_utils
+
+IGNORE_FLAG_PREFIXES = [
+    # For cflags.
+    '-DANDROID_NDK_VERSION_ROLL',
+    '-DCR_LIBCXX_REVISION',
+    '-I',
+    '-g',
+    '-fcrash-diagnostics-dir=',
+    '-fprofile',
+    '--no-system-header-prefix',
+    '--system-header-prefix',
+    '-isystem',
+    '-iquote',
+    '-fmodule-map',
+    '-frandom-seed',
+    '-c ',
+    '-o ',
+    '-fmodule-name=',
+    '--sysroot=',
+    '-fcolor-diagnostics',
+    '-MF ',
+    '-MD',
+
+    # For ldflags.
+    '-Wl,--thinlto-cache-dir',
+    '-Wl,--thinlto-cache-policy',
+    '-Wl,--thinlto-jobs',
+    '-Wl,--start-lib',
+    '-Wl,--end-lib',
+    '-Wl,-whole-archive',
+    '-Wl,-no-whole-archive',
+    '-l',
+    '-L',
+    '-Wl,-soname',
+    '-Wl,-version-script',
+    '-Wl,--version-script',
+    '-fdiagnostics-color',
+    '-Wl,--color-diagnostics',
+    '-B',
+    '-Wl,--dynamic-linker',
+    '-DCR_CLANG_REVISION=',
+]
+
+FLAGS_WITH_PARAMS = (
+    '-Xclang',
+    '-mllvm',
+    '-Xclang -fdebug-compilation-dir',
+    '-Xclang -add-plugin',
+)
+
+
+def KeepFlag(flag):
+  return not any(flag.startswith(prefix) for prefix in IGNORE_FLAG_PREFIXES)
+
+
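+# Sketch (hypothetical plugin name):
+#   MergeFlags(['-Xclang', '-add-plugin', 'foo'])
+#   -> ['-Xclang -add-plugin foo']
+# The first pass joins '-Xclang -add-plugin'; the second attaches 'foo'.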
+def MergeFlags(flags):
+  flags = _MergeFlagsHelper(flags)
+  # A second pass merges two-token params such as
+  # -Xclang -fdebug-compilation-dir.
+  flags = _MergeFlagsHelper(flags)
+  return flags
+
+
+def _MergeFlagsHelper(flags):
+  merged_flags = []
+  while flags:
+    current_flag = flags.pop(0)
+    if flags:
+      next_flag = flags[0]
+    else:
+      next_flag = None
+    merge_flags = False
+
+    # Special case some flags that always come with params.
+    if current_flag in FLAGS_WITH_PARAMS:
+      merge_flags = True
+    # Assume flags without '-' are a param.
+    if next_flag and not next_flag.startswith('-'):
+      merge_flags = True
+    # Special case -plugin-arg prefix because it has the plugin name.
+    if current_flag.startswith('-Xclang -plugin-arg'):
+      merge_flags = True
+    if merge_flags:
+      merged_flag = '{} {}'.format(current_flag, next_flag)
+      merged_flags.append(merged_flag)
+      flags.pop(0)
+    else:
+      merged_flags.append(current_flag)
+  return merged_flags
+
+
+def ParseFlags(flag_file_path):
+  flags = []
+  with open(flag_file_path) as f:
+    for flag in f.read().splitlines():
+      if KeepFlag(flag):
+        flags.append(flag)
+  return flags
+
+
+def main():
+  """Compare the flags with the checked in list."""
+  parser = argparse.ArgumentParser()
+  diff_utils.AddCommandLineFlags(parser)
+  parser.add_argument('--current-flags',
+                      help='Path to flags to check against expectations.')
+  options = parser.parse_args()
+
+  flags = ParseFlags(options.current_flags)
+  flags = MergeFlags(flags)
+
+  msg = """
+This expectation file is meant to inform the build team about changes to
+flags used when building native libraries in chrome (most importantly any
+that relate to security). This is to ensure the flags are replicated when
+building native libraries outside of the repo. Please update the .expected
+files and a WATCHLIST entry will alert the build team to your change."""
+  diff_utils.CheckExpectations('\n'.join(sorted(flags)),
+                               options,
+                               custom_msg=msg)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/check_flag_expectations.pydeps b/src/build/android/gyp/check_flag_expectations.pydeps
new file mode 100644
index 0000000..d8c394a
--- /dev/null
+++ b/src/build/android/gyp/check_flag_expectations.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/check_flag_expectations.pydeps build/android/gyp/check_flag_expectations.py
+../../gn_helpers.py
+check_flag_expectations.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
diff --git a/src/build/android/gyp/compile_java.py b/src/build/android/gyp/compile_java.py
new file mode 100755
index 0000000..2a92842
--- /dev/null
+++ b/src/build/android/gyp/compile_java.py
@@ -0,0 +1,756 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import multiprocessing
+import optparse
+import os
+import re
+import shutil
+import sys
+import time
+import zipfile
+
+from util import build_utils
+from util import md5_check
+from util import jar_info_utils
+from util import server_utils
+
+sys.path.insert(
+    0,
+    os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src'))
+import colorama
+
+_JAVAC_EXTRACTOR = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party',
+                                'android_prebuilts', 'build_tools', 'common',
+                                'framework', 'javac_extractor.jar')
+
+# Add a check here to cause the suggested fix to be applied while compiling.
+# Use this when trying to enable more checks.
+ERRORPRONE_CHECKS_TO_APPLY = []
+
+# Full list of checks: https://errorprone.info/bugpatterns
+ERRORPRONE_WARNINGS_TO_DISABLE = [
+    # The following are super useful, but existing issues need to be fixed first
+    # before they can start failing the build on new errors.
+    'InvalidParam',
+    'InvalidLink',
+    'InvalidInlineTag',
+    'EmptyBlockTag',
+    'PublicConstructorForAbstractClass',
+    'InvalidBlockTag',
+    'StaticAssignmentInConstructor',
+    'MutablePublicArray',
+    'UnescapedEntity',
+    'NonCanonicalType',
+    'AlmostJavadoc',
+    # TODO(crbug.com/834807): Follow steps in bug
+    'DoubleBraceInitialization',
+    # TODO(crbug.com/834790): Follow steps in bug.
+    'CatchAndPrintStackTrace',
+    # TODO(crbug.com/801210): Follow steps in bug.
+    'SynchronizeOnNonFinalField',
+    # TODO(crbug.com/802073): Follow steps in bug.
+    'TypeParameterUnusedInFormals',
+    # TODO(crbug.com/803484): Follow steps in bug.
+    'CatchFail',
+    # TODO(crbug.com/803485): Follow steps in bug.
+    'JUnitAmbiguousTestClass',
+    # Android platform default is always UTF-8.
+    # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset()
+    'DefaultCharset',
+    # Low priority since the alternatives still work.
+    'JdkObsolete',
+    # We don't use that many lambdas.
+    'FunctionalInterfaceClash',
+    # There are lots of times when we just want to post a task.
+    'FutureReturnValueIgnored',
+    # Nice to be explicit about operators, but not necessary.
+    'OperatorPrecedence',
+    # Just false positives in our code.
+    'ThreadJoinLoop',
+    # Low priority corner cases with String.split.
+    # Linking Guava and using Splitter was rejected
+    # in the https://chromium-review.googlesource.com/c/chromium/src/+/871630.
+    'StringSplitter',
+    # Preferred to use another method since it propagates exceptions better.
+    'ClassNewInstance',
+    # Nice to have static inner classes but not necessary.
+    'ClassCanBeStatic',
+    # Explicit is better than implicit.
+    'FloatCast',
+    # Results in false positives.
+    'ThreadLocalUsage',
+    # Also just false positives.
+    'Finally',
+    # False positives for Chromium.
+    'FragmentNotInstantiable',
+    # Low priority to fix.
+    'HidingField',
+    # Low priority.
+    'IntLongMath',
+    # Low priority.
+    'BadComparable',
+    # Low priority.
+    'EqualsHashCode',
+    # Nice to fix but low priority.
+    'TypeParameterShadowing',
+    # Good to have immutable enums, also low priority.
+    'ImmutableEnumChecker',
+    # False positives for testing.
+    'InputStreamSlowMultibyteRead',
+    # Nice to have better primitives.
+    'BoxedPrimitiveConstructor',
+    # Not necessary for tests.
+    'OverrideThrowableToString',
+    # Nice to have better type safety.
+    'CollectionToArraySafeParameter',
+    # Makes logcat debugging more difficult, and does not provide obvious
+    # benefits in the Chromium codebase.
+    'ObjectToString',
+    # Triggers on private methods that are @CalledByNative.
+    'UnusedMethod',
+    # Triggers on generated R.java files.
+    'UnusedVariable',
+    # Not that useful.
+    'UnsafeReflectiveConstructionCast',
+    # Not that useful.
+    'MixedMutabilityReturnType',
+    # Nice to have.
+    'EqualsGetClass',
+    # A lot of false-positives from CharSequence.equals().
+    'UndefinedEquals',
+    # Nice to have.
+    'ExtendingJUnitAssert',
+    # Nice to have.
+    'SystemExitOutsideMain',
+    # Nice to have.
+    'TypeParameterNaming',
+    # Nice to have.
+    'UnusedException',
+    # Nice to have.
+    'UngroupedOverloads',
+    # Nice to have.
+    'InconsistentOverloads',
+    # Dagger generated code triggers this.
+    'SameNameButDifferent',
+    # Nice to have.
+    'UnnecessaryLambda',
+    # Nice to have.
+    'UnnecessaryAnonymousClass',
+    # Nice to have.
+    'LiteProtoToString',
+    # Nice to have.
+    'MissingSummary',
+    # Nice to have.
+    'ReturnFromVoid',
+    # Nice to have.
+    'EmptyCatch',
+    # Nice to have.
+    'BadImport',
+    # Nice to have.
+    'UseCorrectAssertInTests',
+    # Nice to have.
+    'InlineFormatString',
+    # Nice to have.
+    'DefaultPackage',
+    # Must be off since we are now passing in annotation processor generated
+    # code as a source jar (deduplicating work with turbine).
+    'RefersToDaggerCodegen',
+    # We already have presubmit checks for this. Not necessary to warn on
+    # every build.
+    'RemoveUnusedImports',
+    # We do not care about unnecessary parenthesis enough to check for them.
+    'UnnecessaryParentheses',
+]
+
+# Full list of checks: https://errorprone.info/bugpatterns
+# Only those marked as "experimental" need to be listed here in order to be
+# enabled.
+ERRORPRONE_WARNINGS_TO_ENABLE = [
+    'BinderIdentityRestoredDangerously',
+    'EmptyIf',
+    'EqualsBrokenForNull',
+    'InvalidThrows',
+    'LongLiteralLowerCaseSuffix',
+    'MultiVariableDeclaration',
+    'ParameterNotNullable',
+    'RedundantOverride',
+    'StaticQualifiedUsingExpression',
+    'StringEquality',
+    'TimeUnitMismatch',
+    'UnnecessaryStaticImport',
+    'UseBinds',
+    'WildcardImport',
+]
+
+
+def ProcessJavacOutput(output):
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(fileline_prefix +
+                          r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(fileline_prefix +
+                        r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+  # These warnings cannot be suppressed even for third party code. Deprecation
+  # warnings especially do not help since we must support older Android
+  # versions.
+  deprecated_re = re.compile(
+      r'(Note: .* uses? or overrides? a deprecated API.)$')
+  unchecked_re = re.compile(
+      r'(Note: .* uses? unchecked or unsafe operations.)$')
+  recompile_re = re.compile(r'(Note: Recompile with -Xlint:.* for details.)$')
+
+  activity_re = re.compile(r'^(?P<prefix>\s*location: )class Activity$')
+
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start] + color[1] + line[start:end] + colorama.Fore.RESET +
+            colorama.Style.RESET_ALL + line[end:])
+
+  def ApplyFilters(line):
+    return not (deprecated_re.match(line) or unchecked_re.match(line)
+                or recompile_re.match(line))
+
+  def Elaborate(line):
+    if activity_re.match(line):
+      prefix = ' ' * activity_re.match(line).end('prefix')
+      return '{}\n{}Expecting a FragmentActivity? See {}'.format(
+          line, prefix, 'docs/ui/android/bytecode_rewriting.md')
+    return line
+
+  def ApplyColors(line):
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  lines = (l for l in output.split('\n') if ApplyFilters(l))
+  lines = (ApplyColors(Elaborate(l)) for l in lines)
+  return '\n'.join(lines)
+
+
+def _ParsePackageAndClassNames(java_file):
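+  # e.g. (hypothetical file): a source declaring "package org.example;" with
+  # a top-level "class Foo {" yields ('org.example', ['Foo']).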
+  package_name = ''
+  class_names = []
+  with open(java_file) as f:
+    for l in f:
+      # Strip unindented comments.
+      # A leading * is treated as the continuation of a multi-line comment
+      # (our linter doesn't enforce the space that should precede it).
+      l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l)
+
+      m = re.match(r'package\s+(.*?);', l)
+      if m and not package_name:
+        package_name = m.group(1)
+
+      # Not exactly a proper parser, but works for sources that Chrome uses.
+      # In order to not match nested classes, it just checks for lack of indent.
+      m = re.match(r'(?:\S.*?)?(?:class|@?interface|enum)\s+(.+?)\b', l)
+      if m:
+        class_names.append(m.group(1))
+  return package_name, class_names
+
+
+def _ProcessJavaFileForInfo(java_file):
+  package_name, class_names = _ParsePackageAndClassNames(java_file)
+  return java_file, package_name, class_names
+
+
+class _InfoFileContext(object):
+  """Manages the creation of the class->source file .info file."""
+
+  def __init__(self, chromium_code, excluded_globs):
+    self._chromium_code = chromium_code
+    self._excluded_globs = excluded_globs
+    # Map of .java path -> .srcjar/nested/path.java.
+    self._srcjar_files = {}
+    # List of generators from pool.imap_unordered().
+    self._results = []
+    # Lazily created multiprocessing.Pool.
+    self._pool = None
+
+  def AddSrcJarSources(self, srcjar_path, extracted_paths, parent_dir):
+    for path in extracted_paths:
+      # We want the path inside the srcjar so the viewer can have a tree
+      # structure.
+      self._srcjar_files[path] = '{}/{}'.format(
+          srcjar_path, os.path.relpath(path, parent_dir))
+
+  def SubmitFiles(self, java_files):
+    if self._pool is None:
+      # Restrict to a single worker process so this doesn't compete with
+      # compilation, which is always the slower step.
+      self._pool = multiprocessing.Pool(1)
+    logging.info('Submitting %d files for info', len(java_files))
+    self._results.append(
+        self._pool.imap_unordered(
+            _ProcessJavaFileForInfo, java_files, chunksize=1000))
+
+  def _CheckPathMatchesClassName(self, java_file, package_name, class_name):
+    parts = package_name.split('.') + [class_name + '.java']
+    expected_path_suffix = os.path.sep.join(parts)
+    if not java_file.endswith(expected_path_suffix):
+      raise Exception(('Java package+class name do not match its path.\n'
+                       'Actual path: %s\nExpected path: %s') %
+                      (java_file, expected_path_suffix))
+
+  def _ProcessInfo(self, java_file, package_name, class_names, source):
+    for class_name in class_names:
+      yield '{}.{}'.format(package_name, class_name)
+      # Skip aidl srcjars since they don't indent code correctly.
+      if '_aidl.srcjar' in source:
+        continue
+      assert not self._chromium_code or len(class_names) == 1, (
+          'Chromium java files must only have one class: {}'.format(source))
+      if self._chromium_code:
+        # This check is not necessary but nice to check this somewhere.
+        self._CheckPathMatchesClassName(java_file, package_name, class_names[0])
+
+  def _ShouldIncludeInJarInfo(self, fully_qualified_name):
+    name_as_class_glob = fully_qualified_name.replace('.', '/') + '.class'
+    return not build_utils.MatchesGlob(name_as_class_glob, self._excluded_globs)
+
+  def _Collect(self):
+    if self._pool is None:
+      return {}
+    ret = {}
+    for result in self._results:
+      for java_file, package_name, class_names in result:
+        source = self._srcjar_files.get(java_file, java_file)
+        for fully_qualified_name in self._ProcessInfo(java_file, package_name,
+                                                      class_names, source):
+          if self._ShouldIncludeInJarInfo(fully_qualified_name):
+            ret[fully_qualified_name] = java_file
+    self._pool.terminate()
+    return ret
+
+  def __del__(self):
+    # Workaround for a Python 2.x bug with multiprocessing and daemon threads:
+    # https://bugs.python.org/issue4106
+    if self._pool is not None:
+      logging.info('Joining multiprocessing.Pool')
+      self._pool.terminate()
+      self._pool.join()
+      logging.info('Done.')
+
+  def Commit(self, output_path):
+    """Writes a .jar.info file.
+
+    Maps fully qualified names for classes to either the java file that they
+    are defined in or the path of the srcjar that they came from.
+    """
+    logging.info('Collecting info file entries')
+    entries = self._Collect()
+
+    logging.info('Writing info file: %s', output_path)
+    with build_utils.AtomicOutput(output_path, mode='wb') as f:
+      jar_info_utils.WriteJarInfoFile(f, entries, self._srcjar_files)
+    logging.info('Completed info file: %s', output_path)
+
+
+def _CreateJarFile(jar_path, service_provider_configuration_dir,
+                   additional_jar_files, classes_dir):
+  logging.info('Start creating jar file: %s', jar_path)
+  with build_utils.AtomicOutput(jar_path) as f:
+    with zipfile.ZipFile(f.name, 'w') as z:
+      build_utils.ZipDir(z, classes_dir)
+      if service_provider_configuration_dir:
+        config_files = build_utils.FindInDirectory(
+            service_provider_configuration_dir)
+        for config_file in config_files:
+          zip_path = os.path.relpath(config_file,
+                                     service_provider_configuration_dir)
+          build_utils.AddToZipHermetic(z, zip_path, src_path=config_file)
+
+      if additional_jar_files:
+        for src_path, zip_path in additional_jar_files:
+          build_utils.AddToZipHermetic(z, zip_path, src_path=src_path)
+  logging.info('Completed jar file: %s', jar_path)
+
+
+def _OnStaleMd5(options, javac_cmd, javac_args, java_files):
+  logging.info('Starting _OnStaleMd5')
+  if options.enable_kythe_annotations:
+    # Kythe requires these environment variables to be set, so check them
+    # up front rather than failing partway through extraction.
+    if not os.environ.get('KYTHE_ROOT_DIRECTORY') or \
+        not os.environ.get('KYTHE_OUTPUT_DIRECTORY'):
+      raise Exception('--enable-kythe-annotations requires '
+                      'KYTHE_ROOT_DIRECTORY and KYTHE_OUTPUT_DIRECTORY '
+                      'environment variables to be set.')
+    javac_extractor_cmd = build_utils.JavaCmd() + [
+        '-jar',
+        _JAVAC_EXTRACTOR,
+    ]
+    try:
+      _RunCompiler(options, javac_extractor_cmd + javac_args, java_files,
+                   options.classpath, options.jar_path + '.javac_extractor',
+                   save_outputs=False)
+    except build_utils.CalledProcessError as e:
+      # Having no index for a particular target is better than failing the
+      # entire codesearch build. Log the error and move on.
+      logging.error('Could not generate kzip: %s', e)
+
+  # Compiles with Error Prone take twice as long to run as pure javac. Thus GN
+  # rules run both in parallel, with Error Prone only used for checks.
+  _RunCompiler(options,
+               javac_cmd + javac_args,
+               java_files,
+               options.classpath,
+               options.jar_path,
+               save_outputs=not options.enable_errorprone)
+  logging.info('Completed all steps in _OnStaleMd5')
+
+
+def _RunCompiler(options, javac_cmd, java_files, classpath, jar_path,
+                 save_outputs=True):
+  logging.info('Starting _RunCompiler')
+
+  # Use jar_path's directory to ensure paths are relative (needed for goma).
+  temp_dir = jar_path + '.staging'
+  shutil.rmtree(temp_dir, True)
+  os.makedirs(temp_dir)
+  try:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    service_provider_configuration = os.path.join(
+        temp_dir, 'service_provider_configuration')
+
+    if save_outputs:
+      input_srcjars_dir = os.path.join(options.generated_dir, 'input_srcjars')
+      annotation_processor_outputs_dir = os.path.join(
+          options.generated_dir, 'annotation_processor_outputs')
+      # Delete any stale files in the generated directory. The purpose of
+      # options.generated_dir is for codesearch.
+      shutil.rmtree(options.generated_dir, True)
+      info_file_context = _InfoFileContext(options.chromium_code,
+                                           options.jar_info_exclude_globs)
+    else:
+      input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars')
+      annotation_processor_outputs_dir = os.path.join(
+          temp_dir, 'annotation_processor_outputs')
+
+    if options.java_srcjars:
+      logging.info('Extracting srcjars to %s', input_srcjars_dir)
+      build_utils.MakeDirectory(input_srcjars_dir)
+      for srcjar in options.java_srcjars:
+        extracted_files = build_utils.ExtractAll(
+            srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java')
+        java_files.extend(extracted_files)
+        if save_outputs:
+          info_file_context.AddSrcJarSources(srcjar, extracted_files,
+                                             input_srcjars_dir)
+      logging.info('Done extracting srcjars')
+
+    if options.header_jar:
+      logging.info('Extracting service provider configs')
+      # Extract META-INF/services/* so that it can be copied into the output
+      # .jar
+      build_utils.ExtractAll(options.header_jar,
+                             no_clobber=True,
+                             path=service_provider_configuration,
+                             pattern='META-INF/services/*')
+      logging.info('Done extracting service provider configs')
+
+    if save_outputs and java_files:
+      info_file_context.SubmitFiles(java_files)
+
+    if java_files:
+      # Don't include the output directory in the initial set of args since it
+      # being in a temp dir makes it unstable (breaks md5 stamping).
+      cmd = list(javac_cmd)
+      os.makedirs(classes_dir)
+      cmd += ['-d', classes_dir]
+
+      if options.processors:
+        os.makedirs(annotation_processor_outputs_dir)
+        cmd += ['-s', annotation_processor_outputs_dir]
+
+      if classpath:
+        cmd += ['-classpath', ':'.join(classpath)]
+
+      # Pass source paths as response files to avoid extremely long command
+      # lines that are tedious to debug.
+      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+      with open(java_files_rsp_path, 'w') as f:
+        f.write(' '.join(java_files))
+      cmd += ['@' + java_files_rsp_path]
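+      # The '@file' form is javac's standard argfile syntax, e.g.
+      # (illustrative):
+      #   javac -d out/classes @files_list.txt
+      # where files_list.txt contains the space-separated .java paths written
+      # above.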
+
+      logging.debug('Build command %s', cmd)
+      start = time.time()
+      build_utils.CheckOutput(cmd,
+                              print_stdout=options.chromium_code,
+                              stdout_filter=ProcessJavacOutput,
+                              stderr_filter=ProcessJavacOutput,
+                              fail_on_output=options.warnings_as_errors)
+      elapsed = time.time() - start
+      logging.info('Java compilation took %ss', elapsed)
+
+    if save_outputs:
+      if options.processors:
+        annotation_processor_java_files = build_utils.FindInDirectory(
+            annotation_processor_outputs_dir)
+        if annotation_processor_java_files:
+          info_file_context.SubmitFiles(annotation_processor_java_files)
+
+      _CreateJarFile(jar_path, service_provider_configuration,
+                     options.additional_jar_files, classes_dir)
+
+      info_file_context.Commit(jar_path + '.info')
+    else:
+      build_utils.Touch(jar_path)
+
+    logging.info('Completed all steps in _RunCompiler')
+  finally:
+    shutil.rmtree(temp_dir)
+
+
+def _ParseOptions(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--target-name', help='Fully qualified GN target name.')
+  parser.add_option('--skip-build-server',
+                    action='store_true',
+                    help='Avoid using the build server.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--generated-dir',
+      help='Subdirectory within target_gen_dir to place extracted srcjars and '
+      'annotation processor output for codesearch to find.')
+  parser.add_option(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+      'they will all be appended to construct the classpath.')
+  parser.add_option(
+      '--java-version',
+      help='Java language version to use in -source and -target args to javac.')
+  parser.add_option('--classpath', action='append', help='Classpath to use.')
+  parser.add_option(
+      '--processors',
+      action='append',
+      help='GN list of annotation processor main classes.')
+  parser.add_option(
+      '--processorpath',
+      action='append',
+      help='GN list of jars that comprise the classpath used for Annotation '
+      'Processors.')
+  parser.add_option(
+      '--processor-arg',
+      dest='processor_args',
+      action='append',
+      help='key=value arguments for the annotation processors.')
+  parser.add_option(
+      '--additional-jar-file',
+      dest='additional_jar_files',
+      action='append',
+      help='Additional files to package into jar. By default, only Java .class '
+      'files are packaged into the jar. Files should be specified in '
+      'format <filename>:<path to be placed in jar>.')
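+  # Hypothetical example:
+  #   --additional-jar-file=out/gen/foo.txt:assets/foo.txt
+  # would package out/gen/foo.txt into the jar at assets/foo.txt.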
+  parser.add_option(
+      '--jar-info-exclude-globs',
+      help='GN list of exclude globs to filter from generated .info files.')
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+  parser.add_option(
+      '--gomacc-path', help='When set, prefix javac command with gomacc')
+  parser.add_option(
+      '--errorprone-path', help='Use the Errorprone compiler at this path.')
+  parser.add_option(
+      '--enable-errorprone',
+      action='store_true',
+      help='Enable errorprone checks')
+  parser.add_option(
+      '--warnings-as-errors',
+      action='store_true',
+      help='Treat all warnings as errors.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option(
+      '--javac-arg',
+      action='append',
+      default=[],
+      help='Additional arguments to pass to javac.')
+  parser.add_option(
+      '--enable-kythe-annotations',
+      action='store_true',
+      help='Enable generation of Kythe kzip, used for codesearch. Ensure '
+      'proper environment variables are set before using this flag.')
+  parser.add_option(
+      '--header-jar',
+      help='This is the header jar for the current target that contains '
+      'META-INF/services/* files to be included in the output jar.')
+
+  options, args = parser.parse_args(argv)
+  build_utils.CheckOptions(options, parser, required=('jar_path', ))
+
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.processorpath = build_utils.ParseGnList(options.processorpath)
+  options.processors = build_utils.ParseGnList(options.processors)
+  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+  options.jar_info_exclude_globs = build_utils.ParseGnList(
+      options.jar_info_exclude_globs)
+
+  additional_jar_files = []
+  for arg in options.additional_jar_files or []:
+    filepath, jar_filepath = arg.split(':')
+    additional_jar_files.append((filepath, jar_filepath))
+  options.additional_jar_files = additional_jar_files
+
+  java_files = []
+  for arg in args:
+    # Interpret a path prefixed with @ as a file containing a list of sources.
+    if arg.startswith('@'):
+      java_files.extend(build_utils.ReadSourcesList(arg[1:]))
+    else:
+      java_files.append(arg)
+
+  return options, java_files
+
+
+def main(argv):
+  build_utils.InitLogging('JAVAC_DEBUG')
+  argv = build_utils.ExpandFileArgs(argv)
+  options, java_files = _ParseOptions(argv)
+
+  # Only use the build server for errorprone runs.
+  if (options.enable_errorprone and not options.skip_build_server
+      and server_utils.MaybeRunCommand(name=options.target_name,
+                                       argv=sys.argv,
+                                       stamp_file=options.jar_path)):
+    return
+
+  colorama.init()
+  javac_cmd = []
+  if options.gomacc_path:
+    javac_cmd.append(options.gomacc_path)
+  javac_cmd.append(build_utils.JAVAC_PATH)
+
+  javac_args = [
+      '-g',
+      # Chromium only allows UTF-8 source files. Being explicit avoids
+      # javac pulling a default encoding from the user's environment.
+      '-encoding',
+      'UTF-8',
+      # Prevent compiler from compiling .java files not listed as inputs.
+      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
+      '-sourcepath',
+      ':',
+  ]
+
+  if options.enable_errorprone:
+    # All errorprone args are passed space-separated in a single arg.
+    errorprone_flags = ['-Xplugin:ErrorProne']
+    # Make everything a warning so that when treat_warnings_as_errors is false,
+    # they do not fail the build.
+    errorprone_flags += ['-XepAllErrorsAsWarnings']
+    # Don't check generated files.
+    errorprone_flags += ['-XepDisableWarningsInGeneratedCode']
+    errorprone_flags.extend('-Xep:{}:OFF'.format(x)
+                            for x in ERRORPRONE_WARNINGS_TO_DISABLE)
+    errorprone_flags.extend('-Xep:{}:WARN'.format(x)
+                            for x in ERRORPRONE_WARNINGS_TO_ENABLE)
+
+    if ERRORPRONE_CHECKS_TO_APPLY:
+      errorprone_flags += [
+          '-XepPatchLocation:IN_PLACE',
+          '-XepPatchChecks:,' + ','.join(ERRORPRONE_CHECKS_TO_APPLY)
+      ]
+
+    javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]
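+    # The joined flags end up as a single javac argument, e.g. (illustrative):
+    #   '-Xplugin:ErrorProne -XepAllErrorsAsWarnings -Xep:Foo:OFF ...'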
+
+    # This flag quits errorprone after checks and before code generation.
+    # Since we do not need errorprone outputs, this speeds up errorprone by
+    # about 4 seconds for chrome_java.
+    if not ERRORPRONE_CHECKS_TO_APPLY:
+      javac_args += ['-XDshould-stop.ifNoError=FLOW']
+
+  if options.java_version:
+    javac_args.extend([
+        '-source',
+        options.java_version,
+        '-target',
+        options.java_version,
+    ])
+  if options.java_version == '1.8':
+    # Android's boot jar doesn't contain all java 8 classes.
+    options.bootclasspath.append(build_utils.RT_JAR_PATH)
+
+  if options.processors:
+    javac_args.extend(['-processor', ','.join(options.processors)])
+  else:
+    # This effectively disables all annotation processors, including those
+    # discovered via service provider configuration files under
+    # META-INF/services/. See the following link for reference:
+    #     https://docs.oracle.com/en/java/javase/11/tools/javac.html
+    javac_args.extend(['-proc:none'])
+
+  if options.bootclasspath:
+    javac_args.extend(['-bootclasspath', ':'.join(options.bootclasspath)])
+
+  if options.processorpath:
+    javac_args.extend(['-processorpath', ':'.join(options.processorpath)])
+  if options.processor_args:
+    for arg in options.processor_args:
+      javac_args.extend(['-A%s' % arg])
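+      # E.g. a hypothetical --processor-arg=com.example.skipChecks=true is
+      # passed to javac as -Acom.example.skipChecks=true.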
+
+  javac_args.extend(options.javac_arg)
+
+  classpath_inputs = (
+      options.bootclasspath + options.classpath + options.processorpath)
+
+  depfile_deps = classpath_inputs
+  # Files that are already inputs in GN should go in input_paths.
+  input_paths = depfile_deps + options.java_srcjars + java_files
+  if options.header_jar:
+    input_paths.append(options.header_jar)
+  input_paths += [x[0] for x in options.additional_jar_files]
+
+  output_paths = [options.jar_path]
+  if not options.enable_errorprone:
+    output_paths += [options.jar_path + '.info']
+
+  input_strings = javac_cmd + javac_args + options.classpath + java_files + [
+      options.warnings_as_errors, options.jar_info_exclude_globs
+  ]
+
+  # Keep md5_check since we plan to use its changes feature to implement a build
+  # speed improvement for non-signature compiles: https://crbug.com/1170778
+  md5_check.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(options, javac_cmd, javac_args, java_files),
+      options,
+      depfile_deps=depfile_deps,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/compile_java.pydeps b/src/build/android/gyp/compile_java.pydeps
new file mode 100644
index 0000000..f14fd0b
--- /dev/null
+++ b/src/build/android/gyp/compile_java.pydeps
@@ -0,0 +1,16 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_java.pydeps build/android/gyp/compile_java.py
+../../../third_party/colorama/src/colorama/__init__.py
+../../../third_party/colorama/src/colorama/ansi.py
+../../../third_party/colorama/src/colorama/ansitowin32.py
+../../../third_party/colorama/src/colorama/initialise.py
+../../../third_party/colorama/src/colorama/win32.py
+../../../third_party/colorama/src/colorama/winterm.py
+../../gn_helpers.py
+../../print_python_deps.py
+compile_java.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/server_utils.py
diff --git a/src/build/android/gyp/compile_resources.py b/src/build/android/gyp/compile_resources.py
new file mode 100755
index 0000000..8a668e7
--- /dev/null
+++ b/src/build/android/gyp/compile_resources.py
@@ -0,0 +1,1179 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compile Android resources into an intermediate APK.
+
+This can also generate an R.txt, and an .srcjar file containing the proper
+final R.java class for all resource packages the APK depends on.
+
+This will crunch images with aapt2.
+"""
+
+import argparse
+import collections
+import contextlib
+import filecmp
+import hashlib
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import zipfile
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import diff_utils
+from util import manifest_utils
+from util import parallel
+from util import protoresources
+from util import resource_utils
+
+
+# Pngs that we shouldn't convert to webp. Please add rationale when updating.
+_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([
+    # Crashes on Galaxy S5 running L (https://crbug.com/807059).
+    r'.*star_gray\.png',
+    # Android requires pngs for 9-patch images.
+    r'.*\.9\.png',
+    # Daydream requires pngs for icon files.
+    r'.*daydream_icon_.*\.png'
+]))
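+# E.g. a hypothetical 'res/drawable-hdpi/btn_star_gray.png' would be excluded
+# from webp conversion by the first pattern above.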
+
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from argparse.ArgumentParser.parse_args()
+  """
+  parser, input_opts, output_opts = resource_utils.ResourceArgsParser()
+
+  input_opts.add_argument(
+      '--aapt2-path', required=True, help='Path to the Android aapt2 tool.')
+  input_opts.add_argument(
+      '--android-manifest', required=True, help='AndroidManifest.xml path.')
+  input_opts.add_argument(
+      '--r-java-root-package-name',
+      default='base',
+      help='Short package name for this target\'s root R java file (ex. '
+      'input of "base" would become gen.base_module). Defaults to "base".')
+  group = input_opts.add_mutually_exclusive_group()
+  group.add_argument(
+      '--shared-resources',
+      action='store_true',
+      help='Make all resources in R.java non-final and allow the resource IDs '
+      'to be reset to a different package index when the apk is loaded by '
+      'another application at runtime.')
+  group.add_argument(
+      '--app-as-shared-lib',
+      action='store_true',
+      help='Same as --shared-resources, but also ensures all resource IDs are '
+      'directly usable from the APK loaded as an application.')
+
+  input_opts.add_argument(
+      '--package-id',
+      type=int,
+      help='Decimal integer representing custom package ID for resources '
+      '(instead of 127==0x7f). Cannot be used with --shared-resources.')
+
+  input_opts.add_argument(
+      '--package-name',
+      help='Package name that will be used to create R class.')
+
+  input_opts.add_argument(
+      '--rename-manifest-package', help='Package name to force AAPT to use.')
+
+  input_opts.add_argument(
+      '--arsc-package-name',
+      help='Package name to set in manifest of resources.arsc file. This is '
+      'only used for apks under test.')
+
+  input_opts.add_argument(
+      '--shared-resources-allowlist',
+      help='An R.txt file acting as an allowlist for resources that should be '
+      'non-final and have their package ID changed at runtime in R.java. '
+      'Implies and overrides --shared-resources.')
+
+  input_opts.add_argument(
+      '--shared-resources-allowlist-locales',
+      default='[]',
+      help='Optional GN-list of locales. If provided, all strings corresponding'
+      ' to this locale list will be kept in the final output for the '
+      'resources identified through --shared-resources-allowlist, even '
+      'if --locale-allowlist is being used.')
+
+  input_opts.add_argument(
+      '--use-resource-ids-path',
+      help='Use resource IDs generated by aapt --emit-ids.')
+
+  input_opts.add_argument(
+      '--extra-main-r-text-files',
+      help='Additional R.txt files that will be added to the root R.java file, '
+      'but not packaged in the generated resources.arsc. If these resources '
+      'entries contain duplicate resources with the generated R.txt file, they '
+      'must be identical.')
+
+  input_opts.add_argument(
+      '--support-zh-hk',
+      action='store_true',
+      help='Use zh-rTW resources for zh-rHK.')
+
+  input_opts.add_argument(
+      '--debuggable',
+      action='store_true',
+      help='Whether to add android:debuggable="true".')
+
+  input_opts.add_argument('--version-code', help='Version code for apk.')
+  input_opts.add_argument('--version-name', help='Version name for apk.')
+  input_opts.add_argument(
+      '--min-sdk-version', required=True, help='android:minSdkVersion for APK.')
+  input_opts.add_argument(
+      '--target-sdk-version',
+      required=True,
+      help="android:targetSdkVersion for APK.")
+  input_opts.add_argument(
+      '--max-sdk-version',
+      help="android:maxSdkVersion expected in AndroidManifest.xml.")
+  input_opts.add_argument(
+      '--manifest-package', help='Package name of the AndroidManifest.xml.')
+
+  input_opts.add_argument(
+      '--locale-allowlist',
+      default='[]',
+      help='GN list of languages to include. All other language configs will '
+      'be stripped out. List may include a combination of Android locales '
+      'or Chrome locales.')
+  input_opts.add_argument(
+      '--resource-exclusion-regex',
+      default='',
+      help='File-based filter for resources (applied before compiling)')
+  input_opts.add_argument(
+      '--resource-exclusion-exceptions',
+      default='[]',
+      help='GN list of globs that say which files to include even '
+      'when --resource-exclusion-regex is set.')
+
+  input_opts.add_argument(
+      '--dependencies-res-zip-overlays',
+      help='GN list with subset of --dependencies-res-zips to use overlay '
+      'semantics for.')
+
+  input_opts.add_argument(
+      '--values-filter-rules',
+      help='GN list of source_glob:regex for filtering resources after they '
+      'are compiled. Use this to filter out entries within values/ files.')
+
+  input_opts.add_argument('--png-to-webp', action='store_true',
+                          help='Convert png files to webp format.')
+
+  input_opts.add_argument('--webp-binary', default='',
+                          help='Path to the cwebp binary.')
+  input_opts.add_argument(
+      '--webp-cache-dir', help='The directory to store webp image cache.')
+
+  input_opts.add_argument(
+      '--no-xml-namespaces',
+      action='store_true',
+      help='Whether to strip xml namespaces from processed xml resources.')
+  input_opts.add_argument(
+      '--short-resource-paths',
+      action='store_true',
+      help='Whether to shorten resource paths inside the apk or module.')
+  input_opts.add_argument(
+      '--strip-resource-names',
+      action='store_true',
+      help='Whether to strip resource names from the resource table of the apk '
+      'or module.')
+
+  output_opts.add_argument('--arsc-path', help='Apk output for arsc format.')
+  output_opts.add_argument('--proto-path', help='Apk output for proto format.')
+  group = input_opts.add_mutually_exclusive_group()
+  group.add_argument(
+      '--optimized-arsc-path',
+      help='Output for `aapt2 optimize` for arsc format (enables the step).')
+  group.add_argument(
+      '--optimized-proto-path',
+      help='Output for `aapt2 optimize` for proto format (enables the step).')
+  input_opts.add_argument(
+      '--resources-config-paths',
+      default='[]',
+      help='GN list of paths to aapt2 resources config files.')
+
+  output_opts.add_argument(
+      '--info-path', help='Path to output info file for the partial apk.')
+
+  output_opts.add_argument(
+      '--srcjar-out',
+      required=True,
+      help='Path to srcjar to contain generated R.java.')
+
+  output_opts.add_argument('--r-text-out',
+                           help='Path to store the generated R.txt file.')
+
+  output_opts.add_argument(
+      '--proguard-file', help='Path to proguard.txt generated file.')
+
+  output_opts.add_argument(
+      '--proguard-file-main-dex',
+      help='Path to proguard.txt generated file for main dex.')
+
+  output_opts.add_argument(
+      '--emit-ids-out', help='Path to file produced by aapt2 --emit-ids.')
+
+  output_opts.add_argument(
+      '--resources-path-map-out-path',
+      help='Path to file produced by aapt2 that maps original resource paths '
+      'to shortened resource paths inside the apk or module.')
+
+  input_opts.add_argument(
+      '--is-bundle-module',
+      action='store_true',
+      help='Whether resources are being generated for a bundle module.')
+
+  input_opts.add_argument(
+      '--uses-split',
+      help='Value to set uses-split to in the AndroidManifest.xml.')
+
+  input_opts.add_argument(
+      '--extra-verification-manifest',
+      help='Path to AndroidManifest.xml which should be merged into base '
+      'manifest when performing verification.')
+
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+
+  resource_utils.HandleCommonOptions(options)
+
+  options.locale_allowlist = build_utils.ParseGnList(options.locale_allowlist)
+  options.shared_resources_allowlist_locales = build_utils.ParseGnList(
+      options.shared_resources_allowlist_locales)
+  options.resource_exclusion_exceptions = build_utils.ParseGnList(
+      options.resource_exclusion_exceptions)
+  options.dependencies_res_zip_overlays = build_utils.ParseGnList(
+      options.dependencies_res_zip_overlays)
+  options.values_filter_rules = build_utils.ParseGnList(
+      options.values_filter_rules)
+  options.extra_main_r_text_files = build_utils.ParseGnList(
+      options.extra_main_r_text_files)
+  options.resources_config_paths = build_utils.ParseGnList(
+      options.resources_config_paths)
+
+  if options.optimized_proto_path and not options.proto_path:
+    # We could write to a temp file, but it's simpler to require it.
+    parser.error('--optimized-proto-path requires --proto-path')
+
+  if not options.arsc_path and not options.proto_path:
+    parser.error('One of --arsc-path or --proto-path is required.')
+
+  if options.resources_path_map_out_path and not options.short_resource_paths:
+    parser.error(
+        '--resources-path-map-out-path requires --short-resource-paths')
+
+  if options.package_id and options.shared_resources:
+    parser.error('--package-id and --shared-resources are mutually exclusive')
+
+  return options
+
+
+def _IterFiles(root_dir):
+  for root, _, files in os.walk(root_dir):
+    for f in files:
+      yield os.path.join(root, f)
+
+
+def _DuplicateZhResources(resource_dirs, path_info):
+  """Duplicate Taiwanese resources into Hong-Kong specific directory."""
+  for resource_dir in resource_dirs:
+    # We use zh-TW resources for zh-HK (if we have zh-TW resources).
+    for path in _IterFiles(resource_dir):
+      if 'zh-rTW' in path:
+        hk_path = path.replace('zh-rTW', 'zh-rHK')
+        build_utils.MakeDirectory(os.path.dirname(hk_path))
+        shutil.copyfile(path, hk_path)
+        path_info.RegisterRename(
+            os.path.relpath(path, resource_dir),
+            os.path.relpath(hk_path, resource_dir))
+
+
+def _RenameLocaleResourceDirs(resource_dirs, path_info):
+  """Rename locale resource directories into standard names when necessary.
+
+  This is necessary to deal with the fact that older Android releases only
+  support ISO 639-1 two-letter codes, and sometimes even obsolete versions
+  of them.
+
+  In practice it means:
+    * 3-letter ISO 639-2 qualifiers are renamed under a corresponding
+      2-letter one. E.g. for Filipino, strings under values-fil/ will be moved
+      to a new corresponding values-tl/ sub-directory.
+
+    * Modern ISO 639-1 codes will be renamed to their obsolete variants
+      for Indonesian, Hebrew and Yiddish (e.g. 'values-id/' -> 'values-in/').
+
+    * Norwegian macrolanguage strings will be renamed to Bokmal (main
+      Norway language). See http://crbug.com/920960. In practice this
+      means that 'values-no/ -> values-nb/' unless 'values-nb/' already
+      exists.
+
+    * BCP 47 language tags will be renamed to an equivalent ISO 639-1
+      locale qualifier if possible (e.g. 'values-b+en+US/ -> values-en-rUS').
+      Though this is not necessary at the moment, because no third-party
+      package that Chromium links against uses these for the current list of
+      supported locales, this may change when the list is extended in the
+      future.
+
+  Args:
+    resource_dirs: list of top-level resource directories.
+  """
+  for resource_dir in resource_dirs:
+    for path in _IterFiles(resource_dir):
+      locale = resource_utils.FindLocaleInStringResourceFilePath(path)
+      if not locale:
+        continue
+      cr_locale = resource_utils.ToChromiumLocaleName(locale)
+      if not cr_locale:
+        continue  # Unsupported Android locale qualifier!?
+      locale2 = resource_utils.ToAndroidLocaleName(cr_locale)
+      if locale != locale2:
+        path2 = path.replace('/values-%s/' % locale, '/values-%s/' % locale2)
+        if path == path2:
+          raise Exception('Could not substitute locale %s for %s in %s' %
+                          (locale, locale2, path))
+        if os.path.exists(path2):
+          # This happens sometimes, e.g. some libraries provide both
+          # values-nb/ and values-no/ with the same content.
+          continue
+        build_utils.MakeDirectory(os.path.dirname(path2))
+        shutil.move(path, path2)
+        path_info.RegisterRename(
+            os.path.relpath(path, resource_dir),
+            os.path.relpath(path2, resource_dir))
+
+
+def _ToAndroidLocales(locale_allowlist, support_zh_hk):
+  """Converts the list of Chrome locales to Android config locale qualifiers.
+
+  Args:
+    locale_allowlist: A list of Chromium locale names.
+    support_zh_hk: True if we need to support zh-HK by duplicating
+      the zh-TW strings.
+  Returns:
+    A set of matching Android config locale qualifier names.
+  """
+  ret = set()
+  for locale in locale_allowlist:
+    locale = resource_utils.ToAndroidLocaleName(locale)
+    if locale is None or ('-' in locale and '-r' not in locale):
+      raise Exception('Unsupported Chromium locale name: %s' % locale)
+    ret.add(locale)
+    # Always keep non-regional fall-backs.
+    language = locale.split('-')[0]
+    ret.add(language)
+
+  # We don't actually support zh-HK in Chrome on Android, but we mimic the
+  # native side behavior where we use zh-TW resources when the locale is set to
+  # zh-HK. See https://crbug.com/780847.
+  if support_zh_hk:
+    assert not any('HK' in l for l in locale_allowlist), (
+        'Remove special logic if zh-HK is now supported (crbug.com/780847).')
+    ret.add('zh-rHK')
+  return ret
+
+
+def _MoveImagesToNonMdpiFolders(res_root, path_info):
+  """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+  Why? http://crbug.com/289843
+  """
+  for src_dir_name in os.listdir(res_root):
+    src_components = src_dir_name.split('-')
+    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+      continue
+    src_dir = os.path.join(res_root, src_dir_name)
+    if not os.path.isdir(src_dir):
+      continue
+    dst_components = [c for c in src_components if c != 'mdpi']
+    assert dst_components != src_components
+    dst_dir_name = '-'.join(dst_components)
+    dst_dir = os.path.join(res_root, dst_dir_name)
+    build_utils.MakeDirectory(dst_dir)
+    for src_file_name in os.listdir(src_dir):
+      if not os.path.splitext(src_file_name)[1] in ('.png', '.webp', ''):
+        continue
+      src_file = os.path.join(src_dir, src_file_name)
+      dst_file = os.path.join(dst_dir, src_file_name)
+      assert not os.path.lexists(dst_file)
+      shutil.move(src_file, dst_file)
+      path_info.RegisterRename(
+          os.path.relpath(src_file, res_root),
+          os.path.relpath(dst_file, res_root))
+
+
+def _FixManifest(options, temp_dir, extra_manifest=None):
+  """Fix the APK's AndroidManifest.xml.
+
+  This adds any missing namespaces for 'android' and 'tools', and
+  sets certain elements like 'platformBuildVersionCode' or
+  'android:debuggable' depending on the content of |options|.
+
+  Args:
+    options: The command-line arguments tuple.
+    temp_dir: A temporary directory where the fixed manifest will be written to.
+    extra_manifest: Path to an AndroidManifest.xml file which will get merged
+        into the application node of the base manifest.
+  Returns:
+    Tuple of:
+     * Manifest path within |temp_dir|.
+     * Original package_name.
+  """
+  def maybe_extract_version(j):
+    try:
+      return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j)
+    except build_utils.CalledProcessError:
+      return None
+
+  android_sdk_jars = [j for j in options.include_resources
+                      if os.path.basename(j) in ('android.jar',
+                                                 'android_system.jar')]
+  extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
+  successful_extractions = [x for x in extract_all if x]
+  if len(successful_extractions) == 0:
+    raise Exception(
+        'Unable to find android SDK jar among candidates: %s'
+            % ', '.join(android_sdk_jars))
+  elif len(successful_extractions) > 1:
+    raise Exception(
+        'Found multiple android SDK jars among candidates: %s'
+            % ', '.join(android_sdk_jars))
+  version_code, version_name = successful_extractions.pop()[:2]
+
+  debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
+  doc, manifest_node, app_node = manifest_utils.ParseManifest(
+      options.android_manifest)
+
+  if extra_manifest:
+    _, extra_manifest_node, extra_app_node = manifest_utils.ParseManifest(
+        extra_manifest)
+    for node in extra_app_node:
+      app_node.append(node)
+    for node in extra_manifest_node:
+      # DFM manifests have a bunch of tags we don't care about inside
+      # <manifest>, so only take <queries>.
+      if node.tag == 'queries':
+        manifest_node.append(node)
+
+  manifest_utils.AssertUsesSdk(manifest_node, options.min_sdk_version,
+                               options.target_sdk_version)
+  # We explicitly check that maxSdkVersion is set in the manifest since we don't
+  # add it later like minSdkVersion and targetSdkVersion.
+  manifest_utils.AssertUsesSdk(
+      manifest_node,
+      max_sdk_version=options.max_sdk_version,
+      fail_if_not_exist=True)
+  manifest_utils.AssertPackage(manifest_node, options.manifest_package)
+
+  manifest_node.set('platformBuildVersionCode', version_code)
+  manifest_node.set('platformBuildVersionName', version_name)
+
+  orig_package = manifest_node.get('package')
+  if options.arsc_package_name:
+    manifest_node.set('package', options.arsc_package_name)
+
+  if options.debuggable:
+    app_node.set('{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, 'debuggable'),
+                 'true')
+
+  if options.uses_split:
+    uses_split = ElementTree.SubElement(manifest_node, 'uses-split')
+    uses_split.set('{%s}name' % manifest_utils.ANDROID_NAMESPACE,
+                   options.uses_split)
+
+  # Make sure the min-sdk condition is not less than the min-sdk of the bundle.
+  for min_sdk_node in manifest_node.iter('{%s}min-sdk' %
+                                         manifest_utils.DIST_NAMESPACE):
+    dist_value = '{%s}value' % manifest_utils.DIST_NAMESPACE
+    if int(min_sdk_node.get(dist_value)) < int(options.min_sdk_version):
+      min_sdk_node.set(dist_value, options.min_sdk_version)
+
+  manifest_utils.SaveManifest(doc, debug_manifest_path)
+  return debug_manifest_path, orig_package
+
+
+def _CreateKeepPredicate(resource_exclusion_regex,
+                         resource_exclusion_exceptions):
+  """Return a predicate lambda to determine which resource files to keep.
+
+  Args:
+    resource_exclusion_regex: A regular expression describing all resources
+      to exclude, except if they are mip-maps, or if they are listed
+      in |resource_exclusion_exceptions|.
+    resource_exclusion_exceptions: A list of glob patterns corresponding
+      to exceptions to the |resource_exclusion_regex|.
+  Returns:
+    A lambda that takes a path, and returns true if the corresponding file
+    must be kept.
+  """
+  # Do not keep dotfiles (e.g. ".gitkeep"); aapt ignores them anyway.
+  predicate = lambda path: os.path.basename(path)[0] != '.'
+  if resource_exclusion_regex == '':
+    return predicate
+
+  # A simple predicate that only removes (returns False for) paths covered by
+  # the exclusion regex or listed as exceptions.
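+  # Illustrative values: with regex r'\.png$' and exceptions ['*star*'],
+  # 'res/a/icon.png' is removed while 'res/a/star_big.png' is kept.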
+  return lambda path: (
+      not re.search(resource_exclusion_regex, path) or
+      build_utils.MatchesGlob(path, resource_exclusion_exceptions))
+
+
+def _ComputeSha1(path):
+  with open(path, 'rb') as f:
+    data = f.read()
+  return hashlib.sha1(data).hexdigest()
+
+
+def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir):
+  sha1_hash = _ComputeSha1(png_path)
+
+  # The set of arguments that will appear in the cache key.
+  quality_args = ['-m', '6', '-q', '100', '-lossless']
+
+  webp_cache_path = os.path.join(
+      webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
+                                        ''.join(quality_args)))
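+  # Cache entries are keyed as '<sha1>-<cwebp version>-<args>', so changing
+  # the tool or its flags invalidates older entries.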
+  # No need to add a .webp extension; Android can load the images without it.
+  webp_path = os.path.splitext(png_path)[0]
+
+  cache_hit = os.path.exists(webp_cache_path)
+  if cache_hit:
+    os.link(webp_cache_path, webp_path)
+  else:
+    # We write the generated webp image to webp_path instead of directly into
+    # webp_cache_dir to avoid concurrency issues.
+    args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args
+    subprocess.check_call(args)
+
+    try:
+      os.link(webp_path, webp_cache_path)
+    except OSError:
+      # Because of concurrent runs, a webp image may already exist at
+      # webp_cache_path.
+      pass
+
+  os.remove(png_path)
+  original_dir = os.path.dirname(os.path.dirname(png_path))
+  rename_tuple = (os.path.relpath(png_path, original_dir),
+                  os.path.relpath(webp_path, original_dir))
+  return rename_tuple, cache_hit
+
+
+def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir):
+  # check_output() returns bytes; decode so the version string can be embedded
+  # in the cache key path.
+  cwebp_version = subprocess.check_output(
+      [cwebp_binary, '-version']).rstrip().decode()
+  shard_args = [(f, ) for f in png_paths
+                if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
+
+  build_utils.MakeDirectory(webp_cache_dir)
+  results = parallel.BulkForkAndCall(_ConvertToWebPSingle,
+                                     shard_args,
+                                     cwebp_binary=cwebp_binary,
+                                     cwebp_version=cwebp_version,
+                                     webp_cache_dir=webp_cache_dir)
+  total_cache_hits = 0
+  for rename_tuple, cache_hit in results:
+    path_info.RegisterRename(*rename_tuple)
+    total_cache_hits += int(cache_hit)
+
+  logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args))
+
+
+def _RemoveImageExtensions(directory, path_info):
+  """Remove extensions from image files in the passed directory.
+
+  This reduces binary size but does not affect android's ability to load the
+  images.
+  """
+  for f in _IterFiles(directory):
+    if (f.endswith('.png') or f.endswith('.webp')) and not f.endswith('.9.png'):
+      path_with_extension = f
+      path_no_extension = os.path.splitext(path_with_extension)[0]
+      if path_no_extension != path_with_extension:
+        shutil.move(path_with_extension, path_no_extension)
+        path_info.RegisterRename(
+            os.path.relpath(path_with_extension, directory),
+            os.path.relpath(path_no_extension, directory))
+
+
+def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path,
+                      partials_dir):
+  unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir))
+  partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
+
+  compile_command = [
+      aapt2_path,
+      'compile',
+      # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
+      # '--no-crunch',
+      '--dir',
+      dep_subdir,
+      '-o',
+      partial_path
+  ]
+
+  # There are resources targeting API versions lower than our minapi. For
+  # various reasons it's easier to let aapt2 ignore these than for us to
+  # remove them from our build (e.g. it's from a 3rd party library).
+  build_utils.CheckOutput(
+      compile_command,
+      stderr_filter=lambda output: build_utils.FilterLines(
+          output, r'ignoring configuration .* for (styleable|attribute)'))
+
+  # Filtering these files is expensive, so only apply filters to the partials
+  # that have been explicitly targeted.
+  if keep_predicate:
+    logging.debug('Applying .arsc filtering to %s', dep_subdir)
+    protoresources.StripUnwantedResources(partial_path, keep_predicate)
+  return partial_path
+
+
+def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir):
+  patterns = [
+      x[1] for x in exclusion_rules
+      if build_utils.MatchesGlob(dep_subdir, [x[0]])
+  ]
+  if not patterns:
+    return None
+
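+  # Illustrative rule: ('*webview*', r'string/debug_.*') compiles to a
+  # predicate that drops entries matching 'string/debug_.*' from dep subdirs
+  # whose path matches '*webview*'.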
+  regexes = [re.compile(p) for p in patterns]
+  return lambda x: not any(r.search(x) for r in regexes)
+
+
+def _CompileDeps(aapt2_path, dep_subdirs, dep_subdir_overlay_set, temp_dir,
+                 exclusion_rules):
+  partials_dir = os.path.join(temp_dir, 'partials')
+  build_utils.MakeDirectory(partials_dir)
+
+  job_params = [(i, dep_subdir,
+                 _CreateValuesKeepPredicate(exclusion_rules, dep_subdir))
+                for i, dep_subdir in enumerate(dep_subdirs)]
+
+  # Filtering is slow, so ensure jobs with keep_predicate are started first.
+  job_params.sort(key=lambda x: not x[2])
+  partials = list(
+      parallel.BulkForkAndCall(_CompileSingleDep,
+                               job_params,
+                               aapt2_path=aapt2_path,
+                               partials_dir=partials_dir))
+
+  partials_cmd = list()
+  for i, partial in enumerate(partials):
+    dep_subdir = job_params[i][1]
+    if dep_subdir in dep_subdir_overlay_set:
+      partials_cmd += ['-R']
+    partials_cmd += [partial]
+  return partials_cmd
+
+
+def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips):
+  for zip_file in dependencies_res_zips:
+    zip_info_file_path = zip_file + '.info'
+    if os.path.exists(zip_info_file_path):
+      path_info.MergeInfoFile(zip_info_file_path)
+  path_info.Write(info_path)
+
+
+def _RemoveUnwantedLocalizedStrings(dep_subdirs, options):
+  """Remove localized strings that should not go into the final output.
+
+  Args:
+    dep_subdirs: List of resource dependency directories.
+    options: Command-line options namespace.
+  """
+  # Collect locale and file paths from the existing subdirs.
+  # The following variable maps Android locale names to
+  # sets of corresponding xml file paths.
+  locale_to_files_map = collections.defaultdict(set)
+  for directory in dep_subdirs:
+    for f in _IterFiles(directory):
+      locale = resource_utils.FindLocaleInStringResourceFilePath(f)
+      if locale:
+        locale_to_files_map[locale].add(f)
+
+  all_locales = set(locale_to_files_map)
+
+  # Set A: wanted locales, either all of them or the
+  # list provided by --locale-allowlist.
+  wanted_locales = all_locales
+  if options.locale_allowlist:
+    wanted_locales = _ToAndroidLocales(options.locale_allowlist,
+                                       options.support_zh_hk)
+
+  # Set B: shared resources locales, which is either set A
+  # or the list provided by --shared-resources-allowlist-locales
+  shared_resources_locales = wanted_locales
+  shared_names_allowlist = set()
+  if options.shared_resources_allowlist_locales:
+    shared_names_allowlist = set(
+        resource_utils.GetRTxtStringResourceNames(
+            options.shared_resources_allowlist))
+
+    shared_resources_locales = _ToAndroidLocales(
+        options.shared_resources_allowlist_locales, options.support_zh_hk)
+
+  # Remove any file that belongs to a locale not covered by
+  # either A or B.
+  removable_locales = (all_locales - wanted_locales - shared_resources_locales)
+  for locale in removable_locales:
+    for path in locale_to_files_map[locale]:
+      os.remove(path)
+
+  # For any locale in B but not in A, only keep the shared
+  # resource strings in each file.
+  for locale in shared_resources_locales - wanted_locales:
+    for path in locale_to_files_map[locale]:
+      resource_utils.FilterAndroidResourceStringsXml(
+          path, lambda x: x in shared_names_allowlist)
+
+  # For any locale in A but not in B, only keep the strings
+  # that are _not_ from shared resources in the file.
+  for locale in wanted_locales - shared_resources_locales:
+    for path in locale_to_files_map[locale]:
+      resource_utils.FilterAndroidResourceStringsXml(
+          path, lambda x: x not in shared_names_allowlist)
+
+
+def _FilterResourceFiles(dep_subdirs, keep_predicate):
+  # Create a function that selects which resource files should be packaged
+  # into the final output. Any file that does not pass the predicate will
+  # be removed below.
+  png_paths = []
+  for directory in dep_subdirs:
+    for f in _IterFiles(directory):
+      if not keep_predicate(f):
+        os.remove(f)
+      elif f.endswith('.png'):
+        png_paths.append(f)
+
+  return png_paths
+
+
+def _PackageApk(options, build):
+  """Compile and link resources with aapt2.
+
+  Args:
+    options: The command-line options.
+    build: BuildContext object.
+  Returns:
+    The manifest package name for the APK.
+  """
+  logging.debug('Extracting resource .zips')
+  dep_subdirs = []
+  dep_subdir_overlay_set = set()
+  for dependency_res_zip in options.dependencies_res_zips:
+    extracted_dep_subdirs = resource_utils.ExtractDeps([dependency_res_zip],
+                                                       build.deps_dir)
+    dep_subdirs += extracted_dep_subdirs
+    if dependency_res_zip in options.dependencies_res_zip_overlays:
+      dep_subdir_overlay_set.update(extracted_dep_subdirs)
+
+  logging.debug('Applying locale transformations')
+  path_info = resource_utils.ResourceInfoFile()
+  if options.support_zh_hk:
+    _DuplicateZhResources(dep_subdirs, path_info)
+  _RenameLocaleResourceDirs(dep_subdirs, path_info)
+
+  logging.debug('Applying file-based exclusions')
+  keep_predicate = _CreateKeepPredicate(options.resource_exclusion_regex,
+                                        options.resource_exclusion_exceptions)
+  png_paths = _FilterResourceFiles(dep_subdirs, keep_predicate)
+
+  if options.locale_allowlist or options.shared_resources_allowlist_locales:
+    logging.debug('Applying locale-based string exclusions')
+    _RemoveUnwantedLocalizedStrings(dep_subdirs, options)
+
+  if png_paths and options.png_to_webp:
+    logging.debug('Converting png->webp')
+    _ConvertToWebP(options.webp_binary, png_paths, path_info,
+                   options.webp_cache_dir)
+  logging.debug('Applying drawable transformations')
+  for directory in dep_subdirs:
+    _MoveImagesToNonMdpiFolders(directory, path_info)
+    _RemoveImageExtensions(directory, path_info)
+
+  logging.debug('Running aapt2 compile')
+  exclusion_rules = [x.split(':', 1) for x in options.values_filter_rules]
+  partials = _CompileDeps(options.aapt2_path, dep_subdirs,
+                          dep_subdir_overlay_set, build.temp_dir,
+                          exclusion_rules)
+
+  link_command = [
+      options.aapt2_path,
+      'link',
+      '--auto-add-overlay',
+      '--no-version-vectors',
+      # Set SDK versions in case they are not set in the Android manifest.
+      '--min-sdk-version',
+      options.min_sdk_version,
+      '--target-sdk-version',
+      options.target_sdk_version,
+  ]
+
+  for j in options.include_resources:
+    link_command += ['-I', j]
+  if options.version_code:
+    link_command += ['--version-code', options.version_code]
+  if options.version_name:
+    link_command += ['--version-name', options.version_name]
+  if options.proguard_file:
+    link_command += ['--proguard', build.proguard_path]
+    link_command += ['--proguard-minimal-keep-rules']
+  if options.proguard_file_main_dex:
+    link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
+  if options.emit_ids_out:
+    link_command += ['--emit-ids', build.emit_ids_path]
+  if options.r_text_in:
+    shutil.copyfile(options.r_text_in, build.r_txt_path)
+  else:
+    link_command += ['--output-text-symbols', build.r_txt_path]
+
+  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
+  #       can be used with recent versions of aapt2.
+  if options.shared_resources:
+    link_command.append('--shared-lib')
+
+  if options.no_xml_namespaces:
+    link_command.append('--no-xml-namespaces')
+
+  if options.package_id:
+    link_command += [
+        '--package-id',
+        hex(options.package_id),
+        '--allow-reserved-package-id',
+    ]
+
+  fixed_manifest, desired_manifest_package_name = _FixManifest(
+      options, build.temp_dir)
+  if options.rename_manifest_package:
+    desired_manifest_package_name = options.rename_manifest_package
+
+  link_command += [
+      '--manifest', fixed_manifest, '--rename-manifest-package',
+      desired_manifest_package_name
+  ]
+
+  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
+  # Also creates R.txt
+  if options.use_resource_ids_path:
+    _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
+                         desired_manifest_package_name)
+    link_command += ['--stable-ids', build.stable_ids_path]
+
+  link_command += partials
+
+  # We always create a binary arsc file first, then convert to proto, so flags
+  # such as --shared-lib can be supported.
+  arsc_path = build.arsc_path
+  if arsc_path is None:
+    _, arsc_path = tempfile.mkstemp()
+  link_command += ['-o', arsc_path]
+
+  logging.debug('Starting: aapt2 link')
+  link_proc = subprocess.Popen(link_command)
+
+  # Create .res.info file in parallel.
+  _CreateResourceInfoFile(path_info, build.info_path,
+                          options.dependencies_res_zips)
+  logging.debug('Created .res.info file')
+
+  exit_code = link_proc.wait()
+  logging.debug('Finished: aapt2 link')
+  if exit_code:
+    raise subprocess.CalledProcessError(exit_code, link_command)
+
+  if options.proguard_file and (options.shared_resources
+                                or options.app_as_shared_lib):
+    # Make sure the R class associated with the manifest package does not have
+    # its onResourcesLoaded method obfuscated or removed, so that the framework
+    # can call it in the case where the APK is being loaded as a library.
+    with open(build.proguard_path, 'a') as proguard_file:
+      keep_rule = '''
+                  -keep class {package}.R {{
+                    public static void onResourcesLoaded(int);
+                  }}
+                  '''.format(package=desired_manifest_package_name)
+      proguard_file.write(textwrap.dedent(keep_rule))
+
+  logging.debug('Running aapt2 convert')
+  build_utils.CheckOutput([
+      options.aapt2_path, 'convert', '--output-format', 'proto', '-o',
+      build.proto_path, arsc_path
+  ])
+
+  # Workaround for b/147674078. This is only needed for WebLayer and does not
+  # affect WebView usage, since WebView does not use dynamic attributes.
+  if options.shared_resources:
+    logging.debug('Hardcoding dynamic attributes')
+    protoresources.HardcodeSharedLibraryDynamicAttributes(
+        build.proto_path, options.is_bundle_module,
+        options.shared_resources_allowlist)
+
+    build_utils.CheckOutput([
+        options.aapt2_path, 'convert', '--output-format', 'binary', '-o',
+        arsc_path, build.proto_path
+    ])
+
+  if build.arsc_path is None:
+    os.remove(arsc_path)
+
+  if options.optimized_proto_path:
+    _OptimizeApk(build.optimized_proto_path, options, build.temp_dir,
+                 build.proto_path, build.r_txt_path)
+  elif options.optimized_arsc_path:
+    _OptimizeApk(build.optimized_arsc_path, options, build.temp_dir,
+                 build.arsc_path, build.r_txt_path)
+
+  return desired_manifest_package_name
+
+
+def _CombineResourceConfigs(resources_config_paths, out_config_path):
+  with open(out_config_path, 'w') as out_config:
+    for config_path in resources_config_paths:
+      with open(config_path) as config:
+        out_config.write(config.read())
+        out_config.write('\n')
+
+
+def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path):
+  """Optimize intermediate .ap_ file with aapt2.
+
+  Args:
+    output: Path to write to.
+    options: The command-line options.
+    temp_dir: A temporary directory.
+    unoptimized_path: path of the apk to optimize.
+    r_txt_path: path to the R.txt file of the unoptimized apk.
+  """
+  optimize_command = [
+      options.aapt2_path,
+      'optimize',
+      unoptimized_path,
+      '-o',
+      output,
+  ]
+
+  # Optimize the resources.arsc file by obfuscating resource names, allowing
+  # usage only via R.java constants.
+  if options.strip_resource_names:
+    no_collapse_resources = _ExtractNonCollapsableResources(r_txt_path)
+    gen_config_path = os.path.join(temp_dir, 'aapt2.config')
+    if options.resources_config_paths:
+      _CombineResourceConfigs(options.resources_config_paths, gen_config_path)
+    with open(gen_config_path, 'a') as config:
+      for resource in no_collapse_resources:
+        config.write('{}#no_collapse\n'.format(resource))
+
+    optimize_command += [
+        '--collapse-resource-names',
+        '--resources-config-path',
+        gen_config_path,
+    ]
+
+  if options.short_resource_paths:
+    optimize_command += ['--shorten-resource-paths']
+  if options.resources_path_map_out_path:
+    optimize_command += [
+        '--resource-path-shortening-map', options.resources_path_map_out_path
+    ]
+
+  logging.debug('Running aapt2 optimize')
+  build_utils.CheckOutput(
+      optimize_command, print_stdout=False, print_stderr=False)
+
+
+def _ExtractNonCollapsableResources(rtxt_path):
+  """Extract resources that should not be collapsed from the R.txt file
+
+  Resources of type ID are references to UI elements/views. They are used by
+  UI automation testing frameworks. They are kept in so that they don't break
+  tests, even though they may not actually be used during runtime. See
+  https://crbug.com/900993
+  App icons (aka mipmaps) are sometimes referenced by other apps by name, so
+  they must be kept as well. See https://b/161564466
+
+  Args:
+    rtxt_path: Path to R.txt file with all the resources
+  Returns:
+    List of resources in the form of <resource_type>/<resource_name>
+  """
+  resources = []
+  _NO_COLLAPSE_TYPES = ['id', 'mipmap']
+  with open(rtxt_path) as rtxt:
+    for line in rtxt:
+      for resource_type in _NO_COLLAPSE_TYPES:
+        if ' {} '.format(resource_type) in line:
+          resource_name = line.split()[2]
+          resources.append('{}/{}'.format(resource_type, resource_name))
+  return resources
+
+
+def _CreateStableIdsFile(in_path, out_path, package_name):
+  """Transforms a file generated by --emit-ids from another package.
+
+  --stable-ids is generally meant to be used by different versions of the same
+  package. To make it work for other packages, we need to transform the package
+  name references to match the package that resources are being generated for.
+
+  Note: This will fail if the package ID of the resources in
+  |options.use_resource_ids_path| does not match the package ID of the
+  resources being linked.
+  """
+  with open(in_path) as stable_ids_file:
+    with open(out_path, 'w') as output_ids_file:
+      output_stable_ids = re.sub(
+          r'^.*?:',
+          package_name + ':',
+          stable_ids_file.read(),
+          flags=re.MULTILINE)
+      output_ids_file.write(output_stable_ids)
+
+
+def _WriteOutputs(options, build):
+  possible_outputs = [
+      (options.srcjar_out, build.srcjar_path),
+      (options.r_text_out, build.r_txt_path),
+      (options.arsc_path, build.arsc_path),
+      (options.proto_path, build.proto_path),
+      (options.optimized_arsc_path, build.optimized_arsc_path),
+      (options.optimized_proto_path, build.optimized_proto_path),
+      (options.proguard_file, build.proguard_path),
+      (options.proguard_file_main_dex, build.proguard_main_dex_path),
+      (options.emit_ids_out, build.emit_ids_path),
+      (options.info_path, build.info_path),
+  ]
+
+  for final, temp in possible_outputs:
+    # Write file only if it's changed.
+    if final and not (os.path.exists(final) and filecmp.cmp(final, temp)):
+      shutil.move(temp, final)
+
+
+def _CreateNormalizedManifestForVerification(options):
+  with build_utils.TempDir() as tempdir:
+    fixed_manifest, _ = _FixManifest(
+        options, tempdir, extra_manifest=options.extra_verification_manifest)
+    with open(fixed_manifest) as f:
+      return manifest_utils.NormalizeManifest(f.read())
+
+
+def main(args):
+  build_utils.InitLogging('RESOURCE_DEBUG')
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  if options.expected_file:
+    actual_data = _CreateNormalizedManifestForVerification(options)
+    diff_utils.CheckExpectations(actual_data, options)
+    if options.only_verify_expectations:
+      return
+
+  path = options.arsc_path or options.proto_path
+  debug_temp_resources_dir = os.environ.get('TEMP_RESOURCES_DIR')
+  if debug_temp_resources_dir:
+    path = os.path.join(debug_temp_resources_dir, os.path.basename(path))
+  else:
+    # Use a deterministic temp directory since .pb files embed the absolute
+    # path of resources: crbug.com/939984
+    path = path + '.tmpdir'
+  build_utils.DeleteDirectory(path)
+
+  with resource_utils.BuildContext(
+      temp_dir=path, keep_files=bool(debug_temp_resources_dir)) as build:
+
+    manifest_package_name = _PackageApk(options, build)
+
+    # If --shared-resources-allowlist is used, all the resources listed in the
+    # corresponding R.txt file will be non-final, and an onResourcesLoaded()
+    # method will be generated to adjust them at runtime.
+    #
+    # Otherwise, if --shared-resources is used, all resources will be
+    # non-final, and an onResourcesLoaded() method will be generated too.
+    #
+    # Otherwise, all resources will be final, and no method will be generated.
+    #
+    rjava_build_options = resource_utils.RJavaBuildOptions()
+    if options.shared_resources_allowlist:
+      rjava_build_options.ExportSomeResources(
+          options.shared_resources_allowlist)
+      rjava_build_options.GenerateOnResourcesLoaded()
+      if options.shared_resources:
+        # The final resources will only be used in WebLayer, so hardcode the
+        # package ID to be what WebLayer expects.
+        rjava_build_options.SetFinalPackageId(
+            protoresources.SHARED_LIBRARY_HARDCODED_ID)
+    elif options.shared_resources or options.app_as_shared_lib:
+      rjava_build_options.ExportAllResources()
+      rjava_build_options.GenerateOnResourcesLoaded()
+
+    custom_root_package_name = options.r_java_root_package_name
+    grandparent_custom_package_name = None
+
+    # Always generate an R.java file for the package listed in
+    # AndroidManifest.xml because this is where the Android framework looks to
+    # find onResourcesLoaded() for shared library apks. While not actually
+    # necessary for application apks, it also doesn't hurt.
+    apk_package_name = manifest_package_name
+
+    if options.package_name and not options.arsc_package_name:
+      # Feature modules have their own custom root package name and should
+      # inherit from the appropriate base module package. This behaviour should
+      # not be present for test apks with an apk under test. Thus,
+      # arsc_package_name is used as it is only defined for test apks with an
+      # apk under test.
+      custom_root_package_name = options.package_name
+      grandparent_custom_package_name = options.r_java_root_package_name
+      # Feature modules have the same manifest package as the base module but
+      # they should not create an R.java for said manifest package because it
+      # will be created in the base module.
+      apk_package_name = None
+
+    logging.debug('Creating R.srcjar')
+    resource_utils.CreateRJavaFiles(
+        build.srcjar_dir, apk_package_name, build.r_txt_path,
+        options.extra_res_packages, rjava_build_options, options.srcjar_out,
+        custom_root_package_name, grandparent_custom_package_name,
+        options.extra_main_r_text_files)
+    build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)
+
+    # Sanity check that the created resources have the expected package ID.
+    logging.debug('Performing sanity check')
+    if options.package_id:
+      expected_id = options.package_id
+    elif options.shared_resources:
+      expected_id = 0
+    else:
+      expected_id = 127  # == '0x7f'.
+    _, package_id = resource_utils.ExtractArscPackage(
+        options.aapt2_path,
+        build.arsc_path if options.arsc_path else build.proto_path)
+    if package_id != expected_id:
+      raise Exception(
+          'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))
+
+    logging.debug('Copying outputs')
+    _WriteOutputs(options, build)
+
+  if options.depfile:
+    depfile_deps = (options.dependencies_res_zips +
+                    options.dependencies_res_zip_overlays +
+                    options.extra_main_r_text_files + options.include_resources)
+    build_utils.WriteDepfile(options.depfile, options.srcjar_out, depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/compile_resources.pydeps b/src/build/android/gyp/compile_resources.pydeps
new file mode 100644
index 0000000..174b526
--- /dev/null
+++ b/src/build/android/gyp/compile_resources.pydeps
@@ -0,0 +1,61 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../../third_party/protobuf/python/google/__init__.py
+../../../third_party/protobuf/python/google/protobuf/__init__.py
+../../../third_party/protobuf/python/google/protobuf/descriptor.py
+../../../third_party/protobuf/python/google/protobuf/descriptor_database.py
+../../../third_party/protobuf/python/google/protobuf/descriptor_pool.py
+../../../third_party/protobuf/python/google/protobuf/internal/__init__.py
+../../../third_party/protobuf/python/google/protobuf/internal/api_implementation.py
+../../../third_party/protobuf/python/google/protobuf/internal/containers.py
+../../../third_party/protobuf/python/google/protobuf/internal/decoder.py
+../../../third_party/protobuf/python/google/protobuf/internal/encoder.py
+../../../third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py
+../../../third_party/protobuf/python/google/protobuf/internal/extension_dict.py
+../../../third_party/protobuf/python/google/protobuf/internal/message_listener.py
+../../../third_party/protobuf/python/google/protobuf/internal/python_message.py
+../../../third_party/protobuf/python/google/protobuf/internal/type_checkers.py
+../../../third_party/protobuf/python/google/protobuf/internal/well_known_types.py
+../../../third_party/protobuf/python/google/protobuf/internal/wire_format.py
+../../../third_party/protobuf/python/google/protobuf/message.py
+../../../third_party/protobuf/python/google/protobuf/message_factory.py
+../../../third_party/protobuf/python/google/protobuf/reflection.py
+../../../third_party/protobuf/python/google/protobuf/symbol_database.py
+../../../third_party/protobuf/python/google/protobuf/text_encoding.py
+../../../third_party/protobuf/python/google/protobuf/text_format.py
+../../../third_party/six/src/six.py
+../../gn_helpers.py
+compile_resources.py
+proto/Configuration_pb2.py
+proto/Resources_pb2.py
+proto/__init__.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/manifest_utils.py
+util/parallel.py
+util/protoresources.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/copy_ex.py b/src/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000..41604c4
--- /dev/null
+++ b/src/build/android/gyp/copy_ex.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
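+# Example invocation (illustrative paths; --files takes GN-list syntax):
+#   copy_ex.py --dest out/gen/assets --clear \
+#       --files='["input/a.txt", "input/images"]'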
+from __future__ import print_function
+
+import filecmp
+import itertools
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+  """Returns a list of all the files in |base|. Each entry is relative to the
+  last path entry of |base|."""
+  result = []
+  dirname = os.path.dirname(base)
+  for root, _, files in os.walk(base):
+    result.extend([os.path.join(root[len(dirname):], f) for f in files])
+  return result
+
+def CopyFile(f, dest, deps):
+  """Copy file or directory and update deps."""
+  if os.path.isdir(f):
+    shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
+    deps.extend(_get_all_files(f))
+  else:
+    if os.path.isfile(os.path.join(dest, os.path.basename(f))):
+      dest = os.path.join(dest, os.path.basename(f))
+
+    deps.append(f)
+
+    if os.path.isfile(dest):
+      if filecmp.cmp(dest, f, shallow=False):
+        return
+      # The shutil.copy() below would fail if the file does not have write
+      # permissions. Deleting the file has similar costs to modifying the
+      # permissions.
+      os.unlink(dest)
+
+    shutil.copy(f, dest)
+
+def DoCopy(options, deps):
+  """Copy files or directories given in options.files and update deps."""
+  files = list(itertools.chain.from_iterable(build_utils.ParseGnList(f)
+                                             for f in options.files))
+
+  for f in files:
+    if os.path.isdir(f) and not options.clear:
+      print('To avoid stale files you must use --clear when copying '
+            'directories')
+      sys.exit(-1)
+    CopyFile(f, options.dest, deps)
+
+def DoRenaming(options, deps):
+  """Copy and rename files given in options.renaming_sources and update deps."""
+  src_files = list(itertools.chain.from_iterable(
+                   build_utils.ParseGnList(f)
+                   for f in options.renaming_sources))
+
+  dest_files = list(itertools.chain.from_iterable(
+                    build_utils.ParseGnList(f)
+                    for f in options.renaming_destinations))
+
+  if len(src_files) != len(dest_files):
+    print('The number of renaming sources and destinations does not match.')
+    sys.exit(-1)
+
+  for src, dest in zip(src_files, dest_files):
+    if os.path.isdir(src):
+      print('Renaming directories is not supported.')
+      sys.exit(-1)
+    else:
+      CopyFile(src, os.path.join(options.dest, dest), deps)
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--renaming-sources',
+                    action='append',
+                    help='List of files to be renamed while being '
+                         'copied to the dest directory.')
+  parser.add_option('--renaming-destinations',
+                    action='append',
+                    help='List of destination file names without paths; the '
+                         'number of elements must match --renaming-sources.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  deps = []
+
+  if options.files:
+    DoCopy(options, deps)
+
+  if options.renaming_sources:
+    DoRenaming(options, deps)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, options.stamp, deps)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/copy_ex.pydeps b/src/build/android/gyp/copy_ex.pydeps
new file mode 100644
index 0000000..3735251
--- /dev/null
+++ b/src/build/android/gyp/copy_ex.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py
+../../gn_helpers.py
+copy_ex.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_apk_operations_script.py b/src/build/android/gyp/create_apk_operations_script.py
new file mode 100755
index 0000000..660567f
--- /dev/null
+++ b/src/build/android/gyp/create_apk_operations_script.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import os
+import string
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_apk_operations_script.py
+
+import os
+import sys
+
+def main():
+  script_directory = os.path.dirname(__file__)
+  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+      script_directory, p))
+  sys.path.append(resolve(${APK_OPERATIONS_DIR}))
+  import apk_operations
+  output_dir = resolve(${OUTPUT_DIR})
+  try:
+    apk_operations.Run(
+        output_dir,
+        resolve(${APK_PATH}),
+        [resolve(p) for p in ${ADDITIONAL_APK_PATHS}],
+        resolve(${INC_JSON_PATH}),
+        ${FLAGS_FILE},
+        ${TARGET_CPU},
+        resolve(${MAPPING_PATH}))
+  except TypeError:
+    rel_output_dir = os.path.relpath(output_dir)
+    rel_script_path = os.path.relpath(sys.argv[0], output_dir)
+    sys.stderr.write('Script out-of-date. Rebuild via:\\n')
+    sys.stderr.write('  ninja -C %s %s\\n' % (rel_output_dir, rel_script_path))
+    return 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+""")
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--script-output-path',
+                      help='Output path for executable script.')
+  parser.add_argument('--apk-path')
+  parser.add_argument('--incremental-install-json-path')
+  parser.add_argument('--command-line-flags-file')
+  parser.add_argument('--target-cpu')
+  parser.add_argument(
+      '--additional-apk-path',
+      action='append',
+      dest='additional_apk_paths',
+      default=[],
+      help='Paths to APKs to be installed prior to --apk-path.')
+  parser.add_argument('--proguard-mapping-path')
+  args = parser.parse_args(args)
+
+  def relativize(path):
+    """Returns the path relative to the output script directory."""
+    if path is None:
+      return path
+    return os.path.relpath(path, os.path.dirname(args.script_output_path))
+  apk_operations_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+  apk_operations_dir = relativize(apk_operations_dir)
+
+  with open(args.script_output_path, 'w') as script:
+    script_dict = {
+        'APK_OPERATIONS_DIR': repr(apk_operations_dir),
+        'OUTPUT_DIR': repr(relativize('.')),
+        'APK_PATH': repr(relativize(args.apk_path)),
+        'ADDITIONAL_APK_PATHS':
+        [relativize(p) for p in args.additional_apk_paths],
+        'INC_JSON_PATH': repr(relativize(args.incremental_install_json_path)),
+        'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)),
+        'FLAGS_FILE': repr(args.command_line_flags_file),
+        'TARGET_CPU': repr(args.target_cpu),
+    }
+    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+  os.chmod(args.script_output_path, 0o750)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_apk_operations_script.pydeps b/src/build/android/gyp/create_apk_operations_script.pydeps
new file mode 100644
index 0000000..e09bb72
--- /dev/null
+++ b/src/build/android/gyp/create_apk_operations_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_apk_operations_script.pydeps build/android/gyp/create_apk_operations_script.py
+../../gn_helpers.py
+create_apk_operations_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_app_bundle.py b/src/build/android/gyp/create_app_bundle.py
new file mode 100755
index 0000000..0b44c16
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle.py
@@ -0,0 +1,532 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create an Android application bundle from one or more bundle modules."""
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import zipfile
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import dexdump
+
+from util import build_utils
+from util import manifest_utils
+from util import resource_utils
+from xml.etree import ElementTree
+
+import bundletool
+
+# Location of language-based assets in bundle modules.
+_LOCALES_SUBDIR = 'assets/locales/'
+
+# The fallback locale should always have its .pak file included in
+# the base apk, i.e. not use language-based asset targeting. This ensures
+# that Chrome won't crash on startup if its bundle is installed on a device
+# with an unsupported system locale (e.g. fur-rIT).
+_FALLBACK_LOCALE = 'en-US'
+
+# List of split dimensions recognized by this tool.
+_ALL_SPLIT_DIMENSIONS = [ 'ABI', 'SCREEN_DENSITY', 'LANGUAGE' ]
+
+# Due to historical reasons, certain languages identified by Chromium with a
+# 3-letter ISO 639-2 code are mapped to a nearly equivalent 2-letter
+# ISO 639-1 code instead (older Android releases only supported the latter
+# when matching resources).
+#
+# This map implements the same conversion as for Java resources.
+_SHORTEN_LANGUAGE_CODE_MAP = {
+  'fil': 'tl',  # Filipino to Tagalog.
+}
+
+# A list of extensions corresponding to files that should never be compressed
+# in the bundle. This used to be handled by bundletool automatically until
+# release 0.8.0, which required that this be passed to the BundleConfig
+# file instead.
+#
+# This is the original list, which was taken from aapt2, with 'webp' added to
+# it (which curiously was missing from the list).
+_UNCOMPRESSED_FILE_EXTS = [
+    '3g2', '3gp', '3gpp', '3gpp2', 'aac', 'amr', 'awb', 'gif', 'imy', 'jet',
+    'jpeg', 'jpg', 'm4a', 'm4v', 'mid', 'midi', 'mkv', 'mp2', 'mp3', 'mp4',
+    'mpeg', 'mpg', 'ogg', 'png', 'rtttl', 'smf', 'wav', 'webm', 'webp', 'wmv',
+    'xmf'
+]
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--out-bundle', required=True,
+                      help='Output bundle zip archive.')
+  parser.add_argument('--module-zips', required=True,
+                      help='GN-list of module zip archives.')
+  parser.add_argument(
+      '--pathmap-in-paths',
+      action='append',
+      help='List of module pathmap files.')
+  parser.add_argument(
+      '--module-name',
+      action='append',
+      dest='module_names',
+      help='List of module names.')
+  parser.add_argument(
+      '--pathmap-out-path', help='Path to combined pathmap file for bundle.')
+  parser.add_argument(
+      '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
+  parser.add_argument(
+      '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
+  parser.add_argument('--uncompressed-assets', action='append',
+                      help='GN-list of uncompressed assets.')
+  parser.add_argument(
+      '--compress-shared-libraries',
+      action='store_true',
+      help='Whether to store native libraries compressed.')
+  parser.add_argument('--split-dimensions',
+                      help="GN-list of split dimensions to support.")
+  parser.add_argument(
+      '--base-module-rtxt-path',
+      help='Optional path to the base module\'s R.txt file, only used with '
+      'language split dimension.')
+  parser.add_argument(
+      '--base-allowlist-rtxt-path',
+      help='Optional path to an R.txt file, string resources '
+      'listed there _and_ in --base-module-rtxt-path will '
+      'be kept in the base bundle module, even if language'
+      ' splitting is enabled.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+
+  parser.add_argument(
+      '--validate-services',
+      action='store_true',
+      help='Check if services are in base module if isolatedSplits is enabled.')
+
+  options = parser.parse_args(args)
+  options.module_zips = build_utils.ParseGnList(options.module_zips)
+  options.rtxt_in_paths = build_utils.ParseGnList(options.rtxt_in_paths)
+  options.pathmap_in_paths = build_utils.ParseGnList(options.pathmap_in_paths)
+
+  if len(options.module_zips) == 0:
+    raise Exception('The module zip list cannot be empty.')
+
+  # Merge all uncompressed assets into a set.
+  uncompressed_list = []
+  if options.uncompressed_assets:
+    for l in options.uncompressed_assets:
+      for entry in build_utils.ParseGnList(l):
+        # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
+        pos = entry.find(':')
+        if pos >= 0:
+          uncompressed_list.append(entry[pos + 1:])
+        else:
+          uncompressed_list.append(entry)
+
+  options.uncompressed_assets = set(uncompressed_list)
+
+  # Check that all split dimensions are valid
+  if options.split_dimensions:
+    options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
+    for dim in options.split_dimensions:
+      if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
+        parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
+            dim, ', '.join(x.lower() for x in _ALL_SPLIT_DIMENSIONS)))
+
+  # As a special case, --base-allowlist-rtxt-path can be empty to indicate
+  # that the module doesn't need such an allowlist. That's because it is
+  # easier to check this condition here than through GN rules :-(
+  if options.base_allowlist_rtxt_path == '':
+    options.base_module_rtxt_path = None
+
+  # Check --base-module-rtxt-path and --base-allowlist-rtxt-path usage.
+  if options.base_module_rtxt_path:
+    if not options.base_allowlist_rtxt_path:
+      parser.error(
+          '--base-module-rtxt-path requires --base-allowlist-rtxt-path')
+    if 'language' not in options.split_dimensions:
+      parser.error('--base-module-rtxt-path is only valid with '
+                   'language-based splits.')
+
+  return options
+
+
+def _MakeSplitDimension(value, enabled):
+  """Return dict modelling a BundleConfig splitDimension entry."""
+  return {'value': value, 'negate': not enabled}
+
+
+def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
+                              split_dimensions, base_master_resource_ids):
+  """Generate a dictionary that can be written to a JSON BuildConfig.
+
+  Args:
+    uncompressed_assets: A list or set of file paths under assets/ that should
+      always be stored uncompressed.
+    compress_shared_libraries: Boolean, whether to compress native libs.
+    split_dimensions: list of split dimensions.
+    base_master_resource_ids: Optional list of 32-bit resource IDs to keep
+      inside the base module, even when split dimensions are enabled.
+  Returns:
+    A JSON string that can be written to a BundleConfig file.
+  """
+  # Compute splitsConfig list. Each item is a dictionary that can have
+  # the following keys:
+  #    'value': One of ['LANGUAGE', 'DENSITY', 'ABI']
+  #    'negate': Boolean, True to indicate that the bundle should *not* be
+  #              split (unused at the moment by this script).
+
+  split_dimensions = [ _MakeSplitDimension(dim, dim in split_dimensions)
+                       for dim in _ALL_SPLIT_DIMENSIONS ]
+
+  # Native libraries loaded by the crazy linker.
+  # Whether other .so files are compressed is controlled by
+  # "uncompressNativeLibraries".
+  uncompressed_globs = ['lib/*/crazy.*']
+  # Locale-specific pak files stored in bundle splits need not be compressed.
+  uncompressed_globs.extend(
+      ['assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'])
+  uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
+  # NOTE: Use '**' instead of '*' to work through directories!
+  uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
+
+  data = {
+      'optimizations': {
+          'splitsConfig': {
+              'splitDimension': split_dimensions,
+          },
+          'uncompressNativeLibraries': {
+              'enabled': not compress_shared_libraries,
+          },
+          'uncompressDexFiles': {
+              'enabled': True,  # Applies only for P+.
+          }
+      },
+      'compression': {
+          'uncompressedGlob': sorted(uncompressed_globs),
+      },
+  }
+
+  if base_master_resource_ids:
+    data['master_resources'] = {
+        'resource_ids': list(base_master_resource_ids),
+    }
+
+  return json.dumps(data, indent=2)
+
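+# For illustration, with split_dimensions=['LANGUAGE'] and default settings,
+# the generated config looks roughly like this (abridged):
+#   {
+#     "optimizations": {
+#       "splitsConfig": {
+#         "splitDimension": [
+#           {"value": "ABI", "negate": true},
+#           {"value": "SCREEN_DENSITY", "negate": true},
+#           {"value": "LANGUAGE", "negate": false}
+#         ]
+#       },
+#       "uncompressNativeLibraries": {"enabled": true},
+#       "uncompressDexFiles": {"enabled": true}
+#     },
+#     "compression": {"uncompressedGlob": ["**.3g2", "**.3gp", "..."]}
+#   }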
+
+def _RewriteLanguageAssetPath(src_path):
+  """Rewrite the destination path of a locale asset for language-based splits.
+
+  Should only be used when generating bundles with language-based splits.
+  This will rewrite paths that look like locales/<locale>.pak into
+  locales#<language>/<locale>.pak, where <language> is the language code
+  from the locale.
+
+  Returns the new path.
+  """
+  if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
+    return src_path
+
+  locale = src_path[len(_LOCALES_SUBDIR):-4]
+  android_locale = resource_utils.ToAndroidLocaleName(locale)
+
+  # The locale format is <lang>-<region>, <lang>, or BCP-47 (e.g. b+sr+Latn).
+  # Extract the language.
+  pos = android_locale.find('-')
+  if android_locale.startswith('b+'):
+    # If locale is in BCP-47 the language is the second tag (e.g. b+sr+Latn)
+    android_language = android_locale.split('+')[1]
+  elif pos >= 0:
+    android_language = android_locale[:pos]
+  else:
+    android_language = android_locale
+
+  if locale == _FALLBACK_LOCALE:
+    # Fallback locale .pak files must be placed in a different directory
+    # to ensure they are always stored in the base module.
+    result_path = 'assets/fallback-locales/%s.pak' % locale
+  else:
+    # Other language .pak files go into a language-specific asset directory
+    # that bundletool will store in separate split APKs.
+    result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)
+
+  return result_path
+
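+# For illustration:
+#   assets/locales/en-US.pak -> assets/fallback-locales/en-US.pak
+#   assets/locales/fr-CA.pak -> assets/locales#lang_fr/fr-CA.pak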
+
+def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
+  """Splits assets in a module if needed.
+
+  Args:
+    src_module_zip: input zip module path.
+    tmp_dir: Path to temporary directory, where the new output module might
+      be written to.
+    split_dimensions: list of split dimensions.
+
+  Returns:
+    src_module_zip if the module doesn't need asset targeting. Otherwise, the
+    path to a new module zip archive under tmp_dir with the same file name,
+    whose asset paths target the proper dimensions.
+  """
+  split_language = 'LANGUAGE' in split_dimensions
+  if not split_language:
+    # Nothing to target, so return original module path.
+    return src_module_zip
+
+  with zipfile.ZipFile(src_module_zip, 'r') as src_zip:
+    language_files = [
+      f for f in src_zip.namelist() if f.startswith(_LOCALES_SUBDIR)]
+
+    if not language_files:
+      # No language-based assets to split in this module.
+      return src_module_zip
+
+    tmp_zip = os.path.join(tmp_dir, os.path.basename(src_module_zip))
+    with zipfile.ZipFile(tmp_zip, 'w') as dst_zip:
+      for info in src_zip.infolist():
+        src_path = info.filename
+        is_compressed = info.compress_type != zipfile.ZIP_STORED
+
+        dst_path = src_path
+        if src_path in language_files:
+          dst_path = _RewriteLanguageAssetPath(src_path)
+
+        build_utils.AddToZipHermetic(
+            dst_zip,
+            dst_path,
+            data=src_zip.read(src_path),
+            compress=is_compressed)
+
+    return tmp_zip
+
+
+def _GenerateBaseResourcesAllowList(base_module_rtxt_path,
+                                    base_allowlist_rtxt_path):
+  """Generate a allowlist of base master resource ids.
+
+  Args:
+    base_module_rtxt_path: Path to base module R.txt file.
+    base_allowlist_rtxt_path: Path to base allowlist R.txt file.
+  Returns:
+    list of resource ids.
+  """
+  ids_map = resource_utils.GenerateStringResourcesAllowList(
+      base_module_rtxt_path, base_allowlist_rtxt_path)
+  return ids_map.keys()
+
+
+def _ConcatTextFiles(in_paths, out_path):
+  """Concatenate the contents of multiple text files into one.
+
+  The each file contents is preceded by a line containing the original filename.
+
+  Args:
+    in_paths: List of input file paths.
+    out_path: Path to output file.
+  """
+  with open(out_path, 'w') as out_file:
+    for in_path in in_paths:
+      if not os.path.exists(in_path):
+        continue
+      with open(in_path, 'r') as in_file:
+        out_file.write('-- Contents of {}\n'.format(os.path.basename(in_path)))
+        out_file.write(in_file.read())
+
+
+def _LoadPathmap(pathmap_path):
+  """Load the pathmap of obfuscated resource paths.
+
+  Returns: A dict mapping obfuscated paths to original paths, or an empty
+           dict if |pathmap_path| is None.
+  """
+  if pathmap_path is None:
+    return {}
+
+  pathmap = {}
+  with open(pathmap_path, 'r') as f:
+    for line in f:
+      line = line.strip()
+      if line.startswith('--') or line == '':
+        continue
+      original, renamed = line.split(' -> ')
+      pathmap[renamed] = original
+  return pathmap
+
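+# For illustration (hypothetical entry), a pathmap file contains '--' header
+# lines followed by entries like:
+#   res/layout/main.xml -> res/gQ.xml
+# which _LoadPathmap() returns as {'res/gQ.xml': 'res/layout/main.xml'}.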
+
+def _WriteBundlePathmap(module_pathmap_paths, module_names,
+                        bundle_pathmap_path):
+  """Combine the contents of module pathmaps into a bundle pathmap.
+
+  This rebases the resource paths inside the module pathmap before adding them
+  to the bundle pathmap. So res/a.xml inside the base module pathmap would be
+  base/res/a.xml in the bundle pathmap.
+  """
+  with open(bundle_pathmap_path, 'w') as bundle_pathmap_file:
+    for module_pathmap_path, module_name in zip(module_pathmap_paths,
+                                                module_names):
+      if not os.path.exists(module_pathmap_path):
+        continue
+      module_pathmap = _LoadPathmap(module_pathmap_path)
+      for short_path, long_path in module_pathmap.items():
+        rebased_long_path = '{}/{}'.format(module_name, long_path)
+        rebased_short_path = '{}/{}'.format(module_name, short_path)
+        line = '{} -> {}\n'.format(rebased_long_path, rebased_short_path)
+        bundle_pathmap_file.write(line)
+
+
+def _GetManifestForModule(bundle_path, module_name):
+  return ElementTree.fromstring(
+      bundletool.RunBundleTool([
+          'dump', 'manifest', '--bundle', bundle_path, '--module', module_name
+      ]))
+
+
+def _GetComponentNames(manifest, tag_name):
+  android_name = '{%s}name' % manifest_utils.ANDROID_NAMESPACE
+  return [s.attrib.get(android_name) for s in manifest.iter(tag_name)]
+
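+# For illustration (hypothetical class name), given a manifest containing
+# <service android:name="com.example.FooService"/>, calling
+# _GetComponentNames(manifest, 'service') returns ['com.example.FooService'].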
+
+def _MaybeCheckServicesAndProvidersPresentInBase(bundle_path, module_zips):
+  """Checks bundles with isolated splits define all services in the base module.
+
+  Due to b/169196314, service classes are not found if they are not present in
+  the base module. Providers are also checked because they are loaded early in
+  startup, and keeping them in the base module gives more time for the chrome
+  split to load.
+  """
+  base_manifest = _GetManifestForModule(bundle_path, 'base')
+  isolated_splits = base_manifest.get('{%s}isolatedSplits' %
+                                      manifest_utils.ANDROID_NAMESPACE)
+  if isolated_splits != 'true':
+    return
+
+  # Collect service names from all split manifests.
+  base_zip = None
+  service_names = _GetComponentNames(base_manifest, 'service')
+  provider_names = _GetComponentNames(base_manifest, 'provider')
+  for module_zip in module_zips:
+    name = os.path.basename(module_zip)[:-len('.zip')]
+    if name == 'base':
+      base_zip = module_zip
+    else:
+      service_names.extend(
+          _GetComponentNames(_GetManifestForModule(bundle_path, name),
+                             'service'))
+      module_providers = _GetComponentNames(
+          _GetManifestForModule(bundle_path, name), 'provider')
+      if module_providers:
+        raise Exception("Providers should all be declared in the base manifest."
+                        " '%s' module declared: %s" % (name, module_providers))
+
+  # Extract classes from the base module's dex.
+  classes = set()
+  base_package_name = manifest_utils.GetPackage(base_manifest)
+  for package in dexdump.Dump(base_zip):
+    for name, package_dict in package.items():
+      if not name:
+        name = base_package_name
+      classes.update('%s.%s' % (name, c)
+                     for c in package_dict['classes'].keys())
+
+  ignored_service_names = {
+      # Defined in the chime DFM manifest, but unused.
+      # org.chromium.chrome.browser.chime.ScheduledTaskService is used instead.
+      ("com.google.android.libraries.notifications.entrypoints.scheduled."
+       "ScheduledTaskService"),
+
+      # Defined in the chime DFM manifest, only used pre-O (where isolated
+      # splits are not supported).
+      ("com.google.android.libraries.notifications.executor.impl.basic."
+       "ChimeExecutorApiService"),
+  }
+
+  # Ensure all services are present in base module.
+  for service_name in service_names:
+    if service_name not in classes:
+      if service_name in ignored_service_names:
+        continue
+      raise Exception("Service %s should be present in the base module's dex."
+                      " See b/169196314 for more details." % service_name)
+
+  # Ensure all providers are present in base module.
+  for provider_name in provider_names:
+    if provider_name not in classes:
+      raise Exception(
+          "Provider %s should be present in the base module's dex." %
+          provider_name)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  split_dimensions = []
+  if options.split_dimensions:
+    split_dimensions = [x.upper() for x in options.split_dimensions]
+
+
+  with build_utils.TempDir() as tmp_dir:
+    module_zips = [
+        _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
+        for module in options.module_zips]
+
+    base_master_resource_ids = None
+    if options.base_module_rtxt_path:
+      base_master_resource_ids = _GenerateBaseResourcesAllowList(
+          options.base_module_rtxt_path, options.base_allowlist_rtxt_path)
+
+    bundle_config = _GenerateBundleConfigJson(
+        options.uncompressed_assets, options.compress_shared_libraries,
+        split_dimensions, base_master_resource_ids)
+
+    tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')
+
+    # Important: bundletool requires that the bundle config file is
+    # named with a .pb.json extension.
+    tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'
+
+    with open(tmp_bundle_config, 'w') as f:
+      f.write(bundle_config)
+
+    cmd_args = build_utils.JavaCmd(options.warnings_as_errors) + [
+        '-jar',
+        bundletool.BUNDLETOOL_JAR_PATH,
+        'build-bundle',
+        '--modules=' + ','.join(module_zips),
+        '--output=' + tmp_bundle,
+        '--config=' + tmp_bundle_config,
+    ]
+
+    build_utils.CheckOutput(
+        cmd_args,
+        print_stdout=True,
+        print_stderr=True,
+        stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
+        fail_on_output=options.warnings_as_errors)
+
+    if options.validate_services:
+      # TODO(crbug.com/1126301): This step takes 0.4s locally for bundles with
+      # isolated splits disabled and 2s for bundles with isolated splits
+      # enabled.  Consider making this run in parallel or move into a separate
+      # step before enabling isolated splits by default.
+      _MaybeCheckServicesAndProvidersPresentInBase(tmp_bundle, module_zips)
+
+    shutil.move(tmp_bundle, options.out_bundle)
+
+  if options.rtxt_out_path:
+    _ConcatTextFiles(options.rtxt_in_paths, options.rtxt_out_path)
+
+  if options.pathmap_out_path:
+    _WriteBundlePathmap(options.pathmap_in_paths, options.module_names,
+                        options.pathmap_out_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/create_app_bundle.pydeps b/src/build/android/gyp/create_app_bundle.pydeps
new file mode 100644
index 0000000..cbb471a
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle.pydeps
@@ -0,0 +1,48 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle.pydeps build/android/gyp/create_app_bundle.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/base_error.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/catapult/devil/devil/utils/__init__.py
+../../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/dexdump.py
+bundletool.py
+create_app_bundle.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/create_app_bundle_apks.py b/src/build/android/gyp/create_app_bundle_apks.py
new file mode 100755
index 0000000..5950696
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle_apks.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an .apks from an .aab."""
+
+import argparse
+import os
+import sys
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import app_bundle_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument(
+      '--bundle', required=True, help='Path to input .aab file.')
+  parser.add_argument(
+      '--output', required=True, help='Path to output .apks file.')
+  parser.add_argument('--aapt2-path', required=True, help='Path to aapt2.')
+  parser.add_argument(
+      '--keystore-path', required=True, help='Path to keystore.')
+  parser.add_argument(
+      '--keystore-password', required=True, help='Keystore password.')
+  parser.add_argument(
+      '--keystore-name', required=True, help='Key name within keystore')
+  parser.add_argument(
+      '--minimal',
+      action='store_true',
+      help='Create APKs archive with minimal language support.')
+
+  args = parser.parse_args()
+
+  app_bundle_utils.GenerateBundleApks(
+      args.bundle,
+      args.output,
+      args.aapt2_path,
+      args.keystore_path,
+      args.keystore_password,
+      args.keystore_name,
+      minimal=args.minimal,
+      check_for_noop=False)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/create_app_bundle_apks.pydeps b/src/build/android/gyp/create_app_bundle_apks.pydeps
new file mode 100644
index 0000000..20d8ffe
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle_apks.pydeps
@@ -0,0 +1,36 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../../print_python_deps.py
+../pylib/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/app_bundle_utils.py
+bundletool.py
+create_app_bundle_apks.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/create_bundle_wrapper_script.py b/src/build/android/gyp/create_bundle_wrapper_script.py
new file mode 100755
index 0000000..282e206
--- /dev/null
+++ b/src/build/android/gyp/create_bundle_wrapper_script.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create a wrapper script to manage an Android App Bundle."""
+
+import argparse
+import os
+import string
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_bundle_wrapper_script.py
+
+import os
+import sys
+
+def main():
+  script_directory = os.path.dirname(__file__)
+  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+      script_directory, p))
+  sys.path.append(resolve(${WRAPPED_SCRIPT_DIR}))
+  import apk_operations
+
+  additional_apk_paths = [resolve(p) for p in ${ADDITIONAL_APK_PATHS}]
+  apk_operations.RunForBundle(output_directory=resolve(${OUTPUT_DIR}),
+                              bundle_path=resolve(${BUNDLE_PATH}),
+                              bundle_apks_path=resolve(${BUNDLE_APKS_PATH}),
+                              additional_apk_paths=additional_apk_paths,
+                              aapt2_path=resolve(${AAPT2_PATH}),
+                              keystore_path=resolve(${KEYSTORE_PATH}),
+                              keystore_password=${KEYSTORE_PASSWORD},
+                              keystore_alias=${KEY_NAME},
+                              package_name=${PACKAGE_NAME},
+                              command_line_flags_file=${FLAGS_FILE},
+                              proguard_mapping_path=resolve(${MAPPING_PATH}),
+                              target_cpu=${TARGET_CPU},
+                              system_image_locales=${SYSTEM_IMAGE_LOCALES},
+                              default_modules=${DEFAULT_MODULES})
+
+if __name__ == '__main__':
+  sys.exit(main())
+""")
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--script-output-path', required=True,
+                      help='Output path for executable script.')
+  parser.add_argument('--bundle-path', required=True)
+  parser.add_argument('--bundle-apks-path', required=True)
+  parser.add_argument(
+      '--additional-apk-path',
+      action='append',
+      dest='additional_apk_paths',
+      default=[],
+      help='Paths to APKs to be installed prior to --apk-path.')
+  parser.add_argument('--package-name', required=True)
+  parser.add_argument('--aapt2-path', required=True)
+  parser.add_argument('--keystore-path', required=True)
+  parser.add_argument('--keystore-password', required=True)
+  parser.add_argument('--key-name', required=True)
+  parser.add_argument('--command-line-flags-file')
+  parser.add_argument('--proguard-mapping-path')
+  parser.add_argument('--target-cpu')
+  parser.add_argument('--system-image-locales')
+  parser.add_argument('--default-modules', nargs='*', default=[])
+  args = parser.parse_args(args)
+
+  def relativize(path):
+    """Returns the path relative to the output script directory."""
+    if path is None:
+      return path
+    return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+  wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+  wrapped_script_dir = relativize(wrapped_script_dir)
+  with open(args.script_output_path, 'w') as script:
+    script_dict = {
+        'WRAPPED_SCRIPT_DIR':
+        repr(wrapped_script_dir),
+        'OUTPUT_DIR':
+        repr(relativize('.')),
+        'BUNDLE_PATH':
+        repr(relativize(args.bundle_path)),
+        'BUNDLE_APKS_PATH':
+        repr(relativize(args.bundle_apks_path)),
+        'ADDITIONAL_APK_PATHS':
+        [relativize(p) for p in args.additional_apk_paths],
+        'PACKAGE_NAME':
+        repr(args.package_name),
+        'AAPT2_PATH':
+        repr(relativize(args.aapt2_path)),
+        'KEYSTORE_PATH':
+        repr(relativize(args.keystore_path)),
+        'KEYSTORE_PASSWORD':
+        repr(args.keystore_password),
+        'KEY_NAME':
+        repr(args.key_name),
+        'MAPPING_PATH':
+        repr(relativize(args.proguard_mapping_path)),
+        'FLAGS_FILE':
+        repr(args.command_line_flags_file),
+        'TARGET_CPU':
+        repr(args.target_cpu),
+        'SYSTEM_IMAGE_LOCALES':
+        repr(build_utils.ParseGnList(args.system_image_locales)),
+        'DEFAULT_MODULES':
+        repr(args.default_modules),
+    }
+    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+  os.chmod(args.script_output_path, 0o750)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_bundle_wrapper_script.pydeps b/src/build/android/gyp/create_bundle_wrapper_script.pydeps
new file mode 100644
index 0000000..7758ed6
--- /dev/null
+++ b/src/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py
+../../gn_helpers.py
+create_bundle_wrapper_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_java_binary_script.py b/src/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000..5bc9d08
--- /dev/null
+++ b/src/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import argparse
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+extra_program_args = {extra_program_args}
+java_path = {java_path}
+if os.getcwd() != self_dir:
+  offset = os.path.relpath(self_dir, os.getcwd())
+  fix_path = lambda p: os.path.normpath(os.path.join(offset, p))
+  classpath = [fix_path(p) for p in classpath]
+  java_path = fix_path(java_path)
+java_cmd = [java_path]
+# This is a simple argparser for jvm, jar, and classpath arguments.
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument('--jar-args')
+parser.add_argument('--jvm-args')
+parser.add_argument('--classpath')
+# Test_runner parses the classpath for sharding junit tests.
+parser.add_argument('--print-classpath', action='store_true',
+                    help='Prints the classpath. Used by test_runner.')
+known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
+
+if known_args.print_classpath:
+  sys.stdout.write(':'.join(classpath))
+  sys.exit(0)
+
+if known_args.jvm_args:
+  jvm_arguments = known_args.jvm_args.strip('"').split()
+  java_cmd.extend(jvm_arguments)
+if known_args.jar_args:
+  jar_arguments = known_args.jar_args.strip('"').split()
+  if unknown_args:
+    raise Exception('There are unknown arguments')
+else:
+  jar_arguments = unknown_args
+
+if known_args.classpath:
+  classpath += [known_args.classpath]
+
+{extra_flags}
+java_cmd.extend(
+    ['-classpath', ':'.join(classpath), '-enableassertions', \"{main_class}\"])
+java_cmd.extend(extra_program_args)
+java_cmd.extend(jar_arguments)
+os.execvp(java_cmd[0], java_cmd)
+"""
+
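+# For illustration (hypothetical script name and arguments), a generated
+# script might be invoked as:
+#   out/Default/bin/helper --jvm-args="-Xmx1g" input.txt
+# where --jvm-args is split and passed to the JVM, and unrecognized arguments
+# ('input.txt') are forwarded to the jar's main class.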
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = optparse.OptionParser()
+  parser.add_option('--output', help='Output path for executable script.')
+  parser.add_option('--main-class',
+      help='Name of the java class with the "main" entry point.')
+  parser.add_option('--classpath', action='append', default=[],
+      help='Classpath for running the jar.')
+  parser.add_option('--noverify', action='store_true',
+      help='JVM flag: noverify.')
+  parser.add_option('--tiered-stop-at-level-one',
+                    action='store_true',
+                    help='JVM flag: -XX:TieredStopAtLevel=1.')
+
+  options, extra_program_args = parser.parse_args(argv)
+
+  extra_flags = []
+  if options.noverify:
+    extra_flags.append('java_cmd.append("-noverify")')
+  if options.tiered_stop_at_level_one:
+    extra_flags.append('java_cmd.append("-XX:TieredStopAtLevel=1")')
+
+  classpath = []
+  for cp_arg in options.classpath:
+    classpath += build_utils.ParseGnList(cp_arg)
+
+  run_dir = os.path.dirname(options.output)
+  classpath = [os.path.relpath(p, run_dir) for p in classpath]
+  java_path = os.path.relpath(
+      os.path.join(build_utils.JAVA_HOME, 'bin', 'java'), run_dir)
+
+  with build_utils.AtomicOutput(options.output, mode='w') as script:
+    script.write(
+        script_template.format(classpath=('"%s"' % '", "'.join(classpath)),
+                               java_path=repr(java_path),
+                               main_class=options.main_class,
+                               extra_program_args=repr(extra_program_args),
+                               extra_flags='\n'.join(extra_flags)))
+
+  os.chmod(options.output, 0o750)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_java_binary_script.pydeps b/src/build/android/gyp/create_java_binary_script.pydeps
new file mode 100644
index 0000000..6bc21fa
--- /dev/null
+++ b/src/build/android/gyp/create_java_binary_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py
+../../gn_helpers.py
+create_java_binary_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_r_java.py b/src/build/android/gyp/create_r_java.py
new file mode 100755
index 0000000..97e512d
--- /dev/null
+++ b/src/build/android/gyp/create_r_java.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a dummy R.java file from a list of R.txt files."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+
+def _ConcatRTxts(rtxt_in_paths, combined_out_path):
+  all_lines = set()
+  for rtxt_in_path in rtxt_in_paths:
+    with open(rtxt_in_path) as rtxt_in:
+      all_lines.update(rtxt_in.read().splitlines())
+  with open(combined_out_path, 'w') as combined_out:
+    combined_out.write('\n'.join(sorted(all_lines)))
+
+
+def _CreateRJava(rtxts, package_name, srcjar_out):
+  with resource_utils.BuildContext() as build:
+    _ConcatRTxts(rtxts, build.r_txt_path)
+    rjava_build_options = resource_utils.RJavaBuildOptions()
+    rjava_build_options.ExportAllResources()
+    rjava_build_options.ExportAllStyleables()
+    rjava_build_options.GenerateOnResourcesLoaded(fake=True)
+    resource_utils.CreateRJavaFiles(build.srcjar_dir,
+                                    package_name,
+                                    build.r_txt_path,
+                                    extra_res_packages=[],
+                                    rjava_build_options=rjava_build_options,
+                                    srcjar_out=srcjar_out,
+                                    ignore_mismatched_values=True)
+    build_utils.ZipDir(srcjar_out, build.srcjar_dir)
+
+
+def main(args):
+  parser = argparse.ArgumentParser(description='Create an R.java srcjar.')
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--srcjar-out',
+                      required=True,
+                      help='Path to output srcjar.')
+  parser.add_argument('--deps-rtxts',
+                      required=True,
+                      help='List of rtxts of resource dependencies.')
+  parser.add_argument('--r-package',
+                      required=True,
+                      help='R.java package to use.')
+  options = parser.parse_args(build_utils.ExpandFileArgs(args))
+  options.deps_rtxts = build_utils.ParseGnList(options.deps_rtxts)
+
+  _CreateRJava(options.deps_rtxts, options.r_package, options.srcjar_out)
+  build_utils.WriteDepfile(options.depfile,
+                           options.srcjar_out,
+                           inputs=options.deps_rtxts)
+
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_r_java.pydeps b/src/build/android/gyp/create_r_java.pydeps
new file mode 100644
index 0000000..45121e3
--- /dev/null
+++ b/src/build/android/gyp/create_r_java.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_r_java.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/create_r_txt.py b/src/build/android/gyp/create_r_txt.py
new file mode 100755
index 0000000..2adde5d
--- /dev/null
+++ b/src/build/android/gyp/create_r_txt.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a dummy R.txt file from a resource zip."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import resource_utils
+from util import resources_parser
+
+
+def main(args):
+  parser = argparse.ArgumentParser(
+      description='Create an R.txt from resources.')
+  parser.add_argument('--resources-zip-path',
+                      required=True,
+                      help='Path to input resources zip.')
+  parser.add_argument('--rtxt-path',
+                      required=True,
+                      help='Path to output R.txt file.')
+  options = parser.parse_args(build_utils.ExpandFileArgs(args))
+  with build_utils.TempDir() as temp:
+    dep_subdirs = resource_utils.ExtractDeps([options.resources_zip_path], temp)
+    resources_parser.RTxtGenerator(dep_subdirs).WriteRTxtFile(options.rtxt_path)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_r_txt.pydeps b/src/build/android/gyp/create_r_txt.pydeps
new file mode 100644
index 0000000..c7698ee
--- /dev/null
+++ b/src/build/android/gyp/create_r_txt.pydeps
@@ -0,0 +1,31 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_r_txt.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
+util/resources_parser.py
diff --git a/src/build/android/gyp/create_size_info_files.py b/src/build/android/gyp/create_size_info_files.py
new file mode 100755
index 0000000..c60b02d
--- /dev/null
+++ b/src/build/android/gyp/create_size_info_files.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python3
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates size-info/*.info files used by SuperSize."""
+
+import argparse
+import collections
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+
+
+_AAR_VERSION_PATTERN = re.compile(r'/[^/]*?(\.aar/|\.jar/)')
+
+
+def _RemoveDuplicatesFromList(source_list):
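+  # Order-preserving dedupe: ['a', 'b', 'a'] -> ['a', 'b'].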
+  return collections.OrderedDict.fromkeys(source_list).keys()
+
+
+def _TransformAarPaths(path):
+  # .aar files within //third_party/android_deps have a version suffix.
+  # The suffix changes each time .aar files are updated, which makes size diffs
+  # hard to compare (since the before/after have different source paths).
+  # Rather than changing how android_deps works, we employ this work-around
+  # to normalize the paths.
+  # From: .../androidx_appcompat_appcompat/appcompat-1.1.0.aar/res/...
+  #   To: .../androidx_appcompat_appcompat.aar/res/...
+  # https://crbug.com/1056455
+  if 'android_deps' not in path:
+    return path
+  return _AAR_VERSION_PATTERN.sub(r'\1', path)
+
+
+def _MergeResInfoFiles(res_info_path, info_paths):
+  # Concatenate them all.
+  # only_if_changed=False since no build rules depend on this as an input.
+  with build_utils.AtomicOutput(res_info_path, only_if_changed=False,
+                                mode='w+') as dst:
+    for p in info_paths:
+      with open(p) as src:
+        dst.writelines(_TransformAarPaths(l) for l in src)
+
+
+def _PakInfoPathsForAssets(assets):
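+  # Asset entries look like "srcPath" or "srcPath:zipPath"; each .pak's info
+  # file is expected alongside it as srcPath + '.info'.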
+  return [f.split(':')[0] + '.info' for f in assets if f.endswith('.pak')]
+
+
+def _MergePakInfoFiles(merged_path, pak_infos):
+  info_lines = set()
+  for pak_info_path in pak_infos:
+    with open(pak_info_path, 'r') as src_info_file:
+      info_lines.update(_TransformAarPaths(x) for x in src_info_file)
+  # only_if_changed=False since no build rules depend on this as an input.
+  with build_utils.AtomicOutput(merged_path, only_if_changed=False,
+                                mode='w+') as f:
+    f.writelines(sorted(info_lines))
+
+
+def _FullJavaNameFromClassFilePath(path):
+  # Input:  base/android/java/src/org/chromium/Foo.class
+  # Output: base.android.java.src.org.chromium.Foo
+  if not path.endswith('.class'):
+    return ''
+  path = os.path.splitext(path)[0]
+  parts = []
+  while path:
+    # Use split to be platform independent.
+    head, tail = os.path.split(path)
+    path = head
+    parts.append(tail)
+  parts.reverse()  # Package comes first
+  return '.'.join(parts)
+
+
+def _MergeJarInfoFiles(output, inputs):
+  """Merge several .jar.info files to generate an .apk.jar.info.
+
+  Args:
+    output: output file path.
+    inputs: List of .jar.info or .jar files.
+  """
+  info_data = dict()
+  for path in inputs:
+    # For non-prebuilts: .jar.info files are written by compile_java.py and map
+    # .class files to .java source paths.
+    #
+    # For prebuilts: No .jar.info file exists, we scan the .jar files here and
+    # map .class files to the .jar.
+    #
+    # For .aar files: We look for a "source.info" file in the containing
+    # directory in order to map classes back to the .aar (rather than mapping
+    # them to the extracted .jar file).
+    if path.endswith('.info'):
+      info_data.update(jar_info_utils.ParseJarInfoFile(path))
+    else:
+      attributed_path = path
+      if not path.startswith('..'):
+        parent_path = os.path.dirname(path)
+        # See if it's a sub-jar within the .aar.
+        if os.path.basename(parent_path) == 'libs':
+          parent_path = os.path.dirname(parent_path)
+        aar_source_info_path = os.path.join(parent_path, 'source.info')
+        # source.info files exist only for jars from android_aar_prebuilt().
+        # E.g. a java_prebuilt() could point to a generated .jar.
+        if os.path.exists(aar_source_info_path):
+          attributed_path = jar_info_utils.ReadAarSourceInfo(
+              aar_source_info_path)
+
+      with zipfile.ZipFile(path) as zip_info:
+        for name in zip_info.namelist():
+          fully_qualified_name = _FullJavaNameFromClassFilePath(name)
+          if fully_qualified_name:
+            info_data[fully_qualified_name] = _TransformAarPaths('{}/{}'.format(
+                attributed_path, name))
+
+  # only_if_changed=False since no build rules depend on this as an input.
+  with build_utils.AtomicOutput(output, only_if_changed=False) as f:
+    jar_info_utils.WriteJarInfoFile(f, info_data)
+
+
+def _FindJarInputs(jar_paths):
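+  # Prefer an existing .jar.info (written by compile_java.py); otherwise pass
+  # the .jar itself so _MergeJarInfoFiles can scan it.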
+  ret = []
+  for jar_path in jar_paths:
+    jar_info_path = jar_path + '.info'
+    if os.path.exists(jar_info_path):
+      ret.append(jar_info_path)
+    else:
+      ret.append(jar_path)
+  return ret
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--jar-info-path', required=True, help='Output .jar.info file')
+  parser.add_argument(
+      '--pak-info-path', required=True, help='Output .pak.info file')
+  parser.add_argument(
+      '--res-info-path', required=True, help='Output .res.info file')
+  parser.add_argument(
+      '--jar-files',
+      required=True,
+      action='append',
+      help='GN-list of .jar file paths')
+  parser.add_argument(
+      '--assets',
+      required=True,
+      action='append',
+      help='GN-list of files to add as assets in the form '
+      '"srcPath:zipPath", where ":zipPath" is optional.')
+  parser.add_argument(
+      '--uncompressed-assets',
+      required=True,
+      action='append',
+      help='Same as --assets, except disables compression.')
+  parser.add_argument(
+      '--in-res-info-path',
+      required=True,
+      action='append',
+      help='Paths to .ap_.info files')
+
+  options = parser.parse_args(args)
+
+  options.jar_files = build_utils.ParseGnList(options.jar_files)
+  options.assets = build_utils.ParseGnList(options.assets)
+  options.uncompressed_assets = build_utils.ParseGnList(
+      options.uncompressed_assets)
+
+  jar_inputs = _FindJarInputs(_RemoveDuplicatesFromList(options.jar_files))
+  pak_inputs = _PakInfoPathsForAssets(options.assets +
+                                      options.uncompressed_assets)
+  res_inputs = options.in_res_info_path
+
+  # Just create the info files every time. See https://crbug.com/1045024
+  _MergeJarInfoFiles(options.jar_info_path, jar_inputs)
+  _MergePakInfoFiles(options.pak_info_path, pak_inputs)
+  _MergeResInfoFiles(options.res_info_path, res_inputs)
+
+  all_inputs = jar_inputs + pak_inputs + res_inputs
+  build_utils.WriteDepfile(options.depfile,
+                           options.jar_info_path,
+                           inputs=all_inputs)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/create_size_info_files.pydeps b/src/build/android/gyp/create_size_info_files.pydeps
new file mode 100644
index 0000000..1a69c55
--- /dev/null
+++ b/src/build/android/gyp/create_size_info_files.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py
+../../gn_helpers.py
+create_size_info_files.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
diff --git a/src/build/android/gyp/create_ui_locale_resources.py b/src/build/android/gyp/create_ui_locale_resources.py
new file mode 100755
index 0000000..772dab7
--- /dev/null
+++ b/src/build/android/gyp/create_ui_locale_resources.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a zip archive containing localized locale name Android resource
+strings.
+
+This script takes a list of input Chrome-specific locale names, as well as an
+output zip file path.
+
+Each output file will contain the definition of a single string resource,
+named 'current_detected_ui_locale_name', whose value will be the matching
+Chromium locale name. E.g. values-en-rUS/strings.xml will define it as
+'en-US'.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+sys.path.insert(
+    0,
+    os.path.join(
+        os.path.dirname(__file__), '..', '..', '..', 'build', 'android', 'gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+# A small string template for the content of each strings.xml file.
+# NOTE: The name is chosen to avoid any conflicts with strings defined in
+# other resource archives.
+_TEMPLATE = """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string name="current_detected_ui_locale_name">{resource_text}</string>
+</resources>
+"""
+
+# The default Chrome locale value.
+_DEFAULT_CHROME_LOCALE = 'en-US'
+
+
+def _GenerateLocaleStringsXml(locale):
+  return _TEMPLATE.format(resource_text=locale)
+
+
+def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
+  locale_data = _GenerateLocaleStringsXml(locale)
+  if android_locale:
+    zip_path = 'values-%s/strings.xml' % android_locale
+  else:
+    zip_path = 'values/strings.xml'
+  build_utils.AddToZipHermetic(
+      out_zip, zip_path, data=locale_data, compress=False)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  parser.add_argument(
+      '--locale-list',
+      required=True,
+      help='GN-list of Chrome-specific locale names.')
+  parser.add_argument(
+      '--output-zip', required=True, help='Output zip archive path.')
+
+  args = parser.parse_args()
+
+  locale_list = build_utils.ParseGnList(args.locale_list)
+  if not locale_list:
+    raise Exception('Locale list cannot be empty!')
+
+  with build_utils.AtomicOutput(args.output_zip) as tmp_file:
+    with zipfile.ZipFile(tmp_file, 'w') as out_zip:
+      # First, write the default value, since aapt requires one.
+      _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)
+
+      for locale in locale_list:
+        android_locale = resource_utils.ToAndroidLocaleName(locale)
+        _AddLocaleResourceFileToZip(out_zip, android_locale, locale)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/create_ui_locale_resources.pydeps b/src/build/android/gyp/create_ui_locale_resources.pydeps
new file mode 100644
index 0000000..6bb98dd
--- /dev/null
+++ b/src/build/android/gyp/create_ui_locale_resources.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_ui_locale_resources.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/desugar.py b/src/build/android/gyp/desugar.py
new file mode 100755
index 0000000..87eb159
--- /dev/null
+++ b/src/build/android/gyp/desugar.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--desugar-jar', required=True,
+                      help='Path to Desugar.jar.')
+  parser.add_argument('--input-jar', required=True,
+                      help='Jar input path to include .class files from.')
+  parser.add_argument('--output-jar', required=True,
+                      help='Jar output path.')
+  parser.add_argument('--classpath',
+                      action='append',
+                      required=True,
+                      help='Classpath.')
+  parser.add_argument('--bootclasspath', required=True,
+                      help='Path to javac bootclasspath interface jar.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  options = parser.parse_args(args)
+
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+
+  cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
+      '-jar',
+      options.desugar_jar,
+      '--input',
+      options.input_jar,
+      '--output',
+      options.output_jar,
+      '--generate_base_classes_for_default_methods',
+      # Don't include try-with-resources files in every .jar. Instead, they
+      # are included via //third_party/bazel/desugar:desugar_runtime_java.
+      '--desugar_try_with_resources_omit_runtime_classes',
+  ]
+  for path in options.bootclasspath:
+    cmd += ['--bootclasspath_entry', path]
+  for path in options.classpath:
+    cmd += ['--classpath_entry', path]
+  build_utils.CheckOutput(
+      cmd,
+      print_stdout=False,
+      stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
+      fail_on_output=options.warnings_as_errors)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile,
+                             options.output_jar,
+                             inputs=options.bootclasspath + options.classpath)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/desugar.pydeps b/src/build/android/gyp/desugar.pydeps
new file mode 100644
index 0000000..3e5c9ea
--- /dev/null
+++ b/src/build/android/gyp/desugar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/desugar.pydeps build/android/gyp/desugar.py
+../../gn_helpers.py
+desugar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/dex.py b/src/build/android/gyp/dex.py
new file mode 100755
index 0000000..9664922
--- /dev/null
+++ b/src/build/android/gyp/dex.py
@@ -0,0 +1,644 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+from util import md5_check
+from util import zipalign
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
+
+import convert_dex_profile
+
+
+_IGNORE_WARNINGS = (
+    # Caused by Play Services:
+    r'Type `libcore.io.Memory` was not found',
+    # Caused by a missing final class in flogger:
+    r'Type `dalvik.system.VMStack` was not found',
+    # Caused by jacoco code coverage:
+    r'Type `java.lang.management.ManagementFactory` was not found',
+    # TODO(wnwen): Remove this after R8 version 3.0.26-dev:
+    r'Missing class sun.misc.Unsafe',
+    # Caused when the test apk and the apk under test do not have native libs.
+    r'Missing class org.chromium.build.NativeLibraries',
+    # Caused by internal annotation: https://crbug.com/1180222
+    r'Missing class com.google.errorprone.annotations.RestrictedInheritance',
+    # Caused by internal protobuf package: https://crbug.com/1183971
+    r'referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension',  # pylint: disable=line-too-long
+    # Caused by using Bazel desugar instead of D8 for desugar, since Bazel
+    # desugar doesn't preserve interfaces in the same way. This should be
+    # removed when D8 is used for desugaring.
+    r'Warning: Cannot emulate interface ',
+)
+
+
+def _ParseArgs(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--output', required=True, help='Dex output path.')
+  parser.add_argument(
+      '--class-inputs',
+      action='append',
+      help='GN-list of .jars with .class files.')
+  parser.add_argument(
+      '--class-inputs-filearg',
+      action='append',
+      help='GN-list of .jars with .class files (added to depfile).')
+  parser.add_argument(
+      '--dex-inputs', action='append', help='GN-list of .jars with .dex files.')
+  parser.add_argument(
+      '--dex-inputs-filearg',
+      action='append',
+      help='GN-list of .jars with .dex files (added to depfile).')
+  parser.add_argument(
+      '--incremental-dir',
+      help='Path of directory to put intermediate dex files.')
+  parser.add_argument('--main-dex-rules-path',
+                      action='append',
+                      help='Path to main dex rules for multidex.')
+  parser.add_argument(
+      '--multi-dex',
+      action='store_true',
+      help='Allow multiple dex files within output.')
+  parser.add_argument('--library',
+                      action='store_true',
+                      help='Allow numerous dex files within output.')
+  parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
+  parser.add_argument('--skip-custom-d8',
+                      action='store_true',
+                      help='When rebuilding the CustomD8 jar, this may be '
+                      'necessary to avoid incompatibility with the new r8 '
+                      'jar.')
+  parser.add_argument('--custom-d8-jar-path',
+                      required=True,
+                      help='Path to our customized d8 jar.')
+  parser.add_argument('--desugar-dependencies',
+                      help='Path to store desugar dependencies.')
+  parser.add_argument('--desugar', action='store_true')
+  parser.add_argument(
+      '--bootclasspath',
+      action='append',
+      help='GN-list of bootclasspath. Needed for --desugar')
+  parser.add_argument(
+      '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+  parser.add_argument('--show-desugar-default-interface-warnings',
+                      action='store_true',
+                      help='Enable desugaring warnings.')
+  parser.add_argument(
+      '--classpath',
+      action='append',
+      help='GN-list of full classpath. Needed for --desugar')
+  parser.add_argument(
+      '--release',
+      action='store_true',
+      help='Run D8 in release mode. Release mode maximizes the main dex and '
+      'deletes non-essential line number information (vs. debug, which '
+      'minimizes the main dex and keeps all line number information, and '
+      'then some).')
+  parser.add_argument(
+      '--min-api', help='Minimum Android API level compatibility.')
+  parser.add_argument('--force-enable-assertions',
+                      action='store_true',
+                      help='Forcefully enable javac generated assertion code.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--dump-inputs',
+                      action='store_true',
+                      help='Use when filing D8 bugs to capture inputs.'
+                      ' Stores inputs to d8inputs.zip')
+
+  group = parser.add_argument_group('Dexlayout')
+  group.add_argument(
+      '--dexlayout-profile',
+      help=('Text profile for dexlayout. If present, a dexlayout '
+            'pass will happen'))
+  group.add_argument(
+      '--profman-path',
+      help=('Path to ART profman binary. There should be a lib/ directory at '
+            'the same path with shared libraries (shared with dexlayout).'))
+  group.add_argument(
+      '--dexlayout-path',
+      help=('Path to ART dexlayout binary. There should be a lib/ directory at '
+            'the same path with shared libraries (shared with dexlayout).'))
+  group.add_argument('--dexdump-path', help='Path to dexdump binary.')
+  group.add_argument(
+      '--proguard-mapping-path',
+      help=('Path to proguard map from obfuscated symbols in the jar to '
+            'unobfuscated symbols present in the code. If not present, the jar '
+            'is assumed not to be obfuscated.'))
+
+  options = parser.parse_args(args)
+
+  if options.dexlayout_profile:
+    build_utils.CheckOptions(
+        options,
+        parser,
+        required=('profman_path', 'dexlayout_path', 'dexdump_path'))
+  elif options.proguard_mapping_path is not None:
+    parser.error('Unexpected proguard mapping without dexlayout')
+
+  if options.main_dex_rules_path and not options.multi_dex:
+    parser.error('--main-dex-rules-path is unused if multidex is not enabled')
+
+  options.class_inputs = build_utils.ParseGnList(options.class_inputs)
+  options.class_inputs_filearg = build_utils.ParseGnList(
+      options.class_inputs_filearg)
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.dex_inputs = build_utils.ParseGnList(options.dex_inputs)
+  options.dex_inputs_filearg = build_utils.ParseGnList(
+      options.dex_inputs_filearg)
+
+  return options
+
+
+def CreateStderrFilter(show_desugar_default_interface_warnings):
+  def filter_stderr(output):
+    patterns = list(_IGNORE_WARNINGS)
+
+    # When using Bazel's Desugar tool to desugar lambdas and interface methods,
+    # we do not provide D8 with a classpath, which causes a lot of warnings from
+    # D8's default interface desugaring pass. Not having a classpath makes
+    # incremental dexing much more effective. D8 still does backported method
+    # desugaring.
+    # These warnings are also turned off when bytecode checks are turned off.
+    if not show_desugar_default_interface_warnings:
+      patterns += ['default or static interface methods']
+
+    combined_pattern = '|'.join(re.escape(p) for p in patterns)
+    output = build_utils.FilterLines(output, combined_pattern)
+
+    # Each warning has a prefix line of the file it's from. If we've filtered
+    # out the warning, then also filter out the file header.
+    # E.g.:
+    # Warning in path/to/Foo.class:
+    #   Error message #1 indented here.
+    #   Error message #2 indented here.
+    output = re.sub(r'^Warning in .*?:\n(?!  )', '', output, flags=re.MULTILINE)
+    return output
+
+  return filter_stderr
+
+
+def _RunD8(dex_cmd, input_paths, output_path, warnings_as_errors,
+           show_desugar_default_interface_warnings):
+  dex_cmd = dex_cmd + ['--output', output_path] + input_paths
+
+  stderr_filter = CreateStderrFilter(show_desugar_default_interface_warnings)
+
+  with tempfile.NamedTemporaryFile(mode='w') as flag_file:
+    # Chosen arbitrarily. Needed to avoid command-line length limits.
+    MAX_ARGS = 50
+    if len(dex_cmd) > MAX_ARGS:
+      flag_file.write('\n'.join(dex_cmd[MAX_ARGS:]))
+      flag_file.flush()
+      dex_cmd = dex_cmd[:MAX_ARGS]
+      dex_cmd.append('@' + flag_file.name)
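+      # The tool expands the '@<path>' argfile into the remaining arguments.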
+
+    # stdout sometimes spams with things like:
+    # Stripped invalid locals information from 1 method.
+    build_utils.CheckOutput(dex_cmd,
+                            stderr_filter=stderr_filter,
+                            fail_on_output=warnings_as_errors)
+
+
+def _EnvWithArtLibPath(binary_path):
+  """Return an environment dictionary for ART host shared libraries.
+
+  Args:
+    binary_path: the path to an ART host binary.
+
+  Returns:
+    An environment dictionary where LD_LIBRARY_PATH has been augmented with the
+    shared library path for the binary. This assumes that there is a lib/
+    directory in the same location as the binary.
+  """
+  lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
+  env = os.environ.copy()
+  libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
+  libraries.append(lib_path)
+  env['LD_LIBRARY_PATH'] = ':'.join(libraries)
+  return env
+
+
+def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
+  """Create a binary profile for dexlayout.
+
+  Args:
+    text_profile: The ART text profile that will be converted to a binary
+        profile.
+    input_dex: The input dex file to layout.
+    profman_path: Path to the profman binary.
+    temp_dir: Directory to work in.
+
+  Returns:
+    The name of the binary profile, which will live in temp_dir.
+  """
+  binary_profile = os.path.join(
+      temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
+  open(binary_profile, 'w').close()  # Touch binary_profile.
+  profman_cmd = [profman_path,
+                 '--apk=' + input_dex,
+                 '--dex-location=' + input_dex,
+                 '--create-profile-from=' + text_profile,
+                 '--reference-profile-file=' + binary_profile]
+  build_utils.CheckOutput(
+      profman_cmd,
+      env=_EnvWithArtLibPath(profman_path),
+      stderr_filter=lambda output:
+          build_utils.FilterLines(output, '|'.join(
+              [r'Could not find (method_id|proto_id|name):',
+               r'Could not create type list'])))
+  return binary_profile
+
+
+def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
+  """Layout a dexfile using a profile.
+
+  Args:
+    binary_profile: An ART binary profile, eg output from _CreateBinaryProfile.
+    input_dex: The dex file used to create the binary profile.
+    dexlayout_path: Path to the dexlayout binary.
+    temp_dir: Directory to work in.
+
+  Returns:
+    List of output files produced by dexlayout. This will be one if the input
+    was a single dexfile, or multiple files if the input was a multidex
+    zip. These output files are located in temp_dir.
+  """
+  dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output')
+  os.mkdir(dexlayout_output_dir)
+  dexlayout_cmd = [
+      dexlayout_path,
+      '-u',  # Update checksum
+      '-p', binary_profile,
+      '-w', dexlayout_output_dir,
+      input_dex,
+  ]
+  build_utils.CheckOutput(
+      dexlayout_cmd,
+      env=_EnvWithArtLibPath(dexlayout_path),
+      stderr_filter=lambda output:
+          build_utils.FilterLines(output,
+                                  r'Can.t mmap dex file.*please zipalign'))
+  output_files = os.listdir(dexlayout_output_dir)
+  if not output_files:
+    raise Exception('dexlayout unexpectedly produced no output')
+  return sorted([os.path.join(dexlayout_output_dir, f) for f in output_files])
+
+
+def _ZipMultidex(file_dir, dex_files):
+  """Zip dex files into a multidex.
+
+  Args:
+    file_dir: The directory into which to write the output.
+    dex_files: The dexfiles forming the multizip. Their names must end with
+      classes.dex, classes2.dex, ...
+
+  Returns:
+    The name of the multidex file, which will live in file_dir.
+  """
+  ordered_files = []  # List of (archive name, file name)
+  for f in dex_files:
+    if f.endswith('dex.jar'):
+      ordered_files.append(('classes.dex', f))
+      break
+  if not ordered_files:
+    raise Exception('Could not find classes.dex multidex file in %s' %
+                    dex_files)
+  for dex_idx in range(2, len(dex_files) + 1):
+    archive_name = 'classes%d.dex' % dex_idx
+    for f in dex_files:
+      if f.endswith(archive_name):
+        ordered_files.append((archive_name, f))
+        break
+    else:
+      raise Exception('Could not find %s multidex file in %s' %
+                      (archive_name, dex_files))
+  if len(set(f[1] for f in ordered_files)) != len(ordered_files):
+    raise Exception('Unexpected clashing filenames for multidex in %s' %
+                    dex_files)
+
+  zip_name = os.path.join(file_dir, 'multidex_classes.zip')
+  build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
+                     for archive_name, file_name in ordered_files),
+                    zip_name)
+  return zip_name
+
+
+def _ZipAligned(dex_files, output_path):
+  """Creates a .dex.jar with 4-byte aligned files.
+
+  Args:
+    dex_files: List of dex files.
+    output_path: The output file in which to write the zip.
+  """
+  with zipfile.ZipFile(output_path, 'w') as z:
+    for i, dex_file in enumerate(dex_files):
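+      # Names follow classes.dex, classes2.dex, classes3.dex, ...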
+      name = 'classes{}.dex'.format(i + 1 if i > 0 else '')
+      zipalign.AddToZipHermetic(z, name, src_path=dex_file, alignment=4)
+
+
+def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
+  if options.proguard_mapping_path is not None:
+    matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
+    convert_dex_profile.ObfuscateProfile(
+        options.dexlayout_profile, tmp_dex_output,
+        options.proguard_mapping_path, options.dexdump_path, matching_profile)
+  else:
+    logging.warning('No obfuscation for %s', options.dexlayout_profile)
+    matching_profile = options.dexlayout_profile
+  binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
+                                        options.profman_path, tmp_dir)
+  output_files = _LayoutDex(binary_profile, tmp_dex_output,
+                            options.dexlayout_path, tmp_dir)
+  if len(output_files) > 1:
+    return _ZipMultidex(tmp_dir, output_files)
+
+  if zipfile.is_zipfile(output_files[0]):
+    return output_files[0]
+
+  final_output = os.path.join(tmp_dir, 'dex_classes.zip')
+  _ZipAligned(output_files, final_output)
+  return final_output
+
+
+def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
+  tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
+  needs_dexing = not all(f.endswith('.dex') for f in d8_inputs)
+  needs_dexmerge = output.endswith('.dex') or not (options and options.library)
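+  # Inputs holding .class files must be dexed; merging is skipped only for
+  # --library outputs, which may keep many per-class .dex files.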
+  if needs_dexing or needs_dexmerge:
+    if options and options.main_dex_rules_path:
+      for main_dex_rule in options.main_dex_rules_path:
+        dex_cmd = dex_cmd + ['--main-dex-rules', main_dex_rule]
+
+    tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
+    os.mkdir(tmp_dex_dir)
+
+    _RunD8(dex_cmd, d8_inputs, tmp_dex_dir,
+           (not options or options.warnings_as_errors),
+           (options and options.show_desugar_default_interface_warnings))
+    logging.debug('Performed dex merging')
+
+    dex_files = [os.path.join(tmp_dex_dir, f) for f in os.listdir(tmp_dex_dir)]
+
+    if output.endswith('.dex'):
+      if len(dex_files) > 1:
+        raise Exception('%d files created, expected 1' % len(dex_files))
+      tmp_dex_output = dex_files[0]
+    else:
+      _ZipAligned(sorted(dex_files), tmp_dex_output)
+  else:
+    # Skip dexmerger. Just put all incrementals into the .jar individually.
+    _ZipAligned(sorted(d8_inputs), tmp_dex_output)
+    logging.debug('Quick-zipped %d files', len(d8_inputs))
+
+  if options and options.dexlayout_profile:
+    tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
+
+  # The dex file is complete and can be moved out of tmp_dir.
+  shutil.move(tmp_dex_output, output)
+
+
+def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir):
+  """Returns a list of all intermediate dex file paths."""
+  dex_files = []
+  for jar in class_inputs:
+    with zipfile.ZipFile(jar, 'r') as z:
+      for subpath in z.namelist():
+        if subpath.endswith('.class'):
+          subpath = subpath[:-5] + 'dex'
+          dex_files.append(os.path.join(incremental_dir, subpath))
+  return dex_files
+
+
+def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files):
+  """Deletes intermediate .dex files that are no longer needed."""
+  all_files = build_utils.FindInDirectory(dex_dir)
+  desired_files = set(dex_files)
+  for path in all_files:
+    if path not in desired_files:
+      os.unlink(path)
+
+
+def _ParseDesugarDeps(desugar_dependencies_file):
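+  """Parses a desugar dependency file into {dependency: set(dependents)}.
+
+  Each line has the form "<dependent> -> <dependency>" (as written via the
+  --desugar-dependencies flag).
+  """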
+  dependents_from_dependency = collections.defaultdict(set)
+  if desugar_dependencies_file and os.path.exists(desugar_dependencies_file):
+    with open(desugar_dependencies_file, 'r') as f:
+      for line in f:
+        dependent, dependency = line.rstrip().split(' -> ')
+        dependents_from_dependency[dependency].add(dependent)
+  return dependents_from_dependency
+
+
+def _ComputeRequiredDesugarClasses(changes, desugar_dependencies_file,
+                                   class_inputs, classpath):
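+  # Classpath dependencies are keyed as "jar:subpath"; dependencies on direct
+  # class inputs are keyed by bare subpath.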
+  dependents_from_dependency = _ParseDesugarDeps(desugar_dependencies_file)
+  required_classes = set()
+  # Gather classes that need to be re-desugared from changes in the classpath.
+  for jar in classpath:
+    for subpath in changes.IterChangedSubpaths(jar):
+      dependency = '{}:{}'.format(jar, subpath)
+      required_classes.update(dependents_from_dependency[dependency])
+
+  for jar in class_inputs:
+    for subpath in changes.IterChangedSubpaths(jar):
+      required_classes.update(dependents_from_dependency[subpath])
+
+  return required_classes
+
+
+def _ExtractClassFiles(changes, tmp_dir, class_inputs, required_classes_set):
+  classes_list = []
+  for jar in class_inputs:
+    if changes:
+      changed_class_list = (set(changes.IterChangedSubpaths(jar))
+                            | required_classes_set)
+      predicate = lambda x: x in changed_class_list and x.endswith('.class')
+    else:
+      predicate = lambda x: x.endswith('.class')
+
+    classes_list.extend(
+        build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
+  return classes_list
+
+
+def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
+  # Create temporary directory for classes to be extracted to.
+  tmp_extract_dir = os.path.join(tmp_dir, 'tmp_extract_dir')
+  os.mkdir(tmp_extract_dir)
+
+  # Do a full rebuild when changes occur in non-input files.
+  allowed_changed = set(options.class_inputs)
+  allowed_changed.update(options.dex_inputs)
+  allowed_changed.update(options.classpath)
+  strings_changed = changes.HasStringChanges()
+  non_direct_input_changed = next(
+      (p for p in changes.IterChangedPaths() if p not in allowed_changed), None)
+
+  if strings_changed or non_direct_input_changed:
+    logging.debug('Full dex required: strings_changed=%s path_changed=%s',
+                  strings_changed, non_direct_input_changed)
+    changes = None
+
+  if changes:
+    required_desugar_classes_set = _ComputeRequiredDesugarClasses(
+        changes, options.desugar_dependencies, options.class_inputs,
+        options.classpath)
+    logging.debug('Class files needing re-desugar: %d',
+                  len(required_desugar_classes_set))
+  else:
+    required_desugar_classes_set = set()
+  class_files = _ExtractClassFiles(changes, tmp_extract_dir,
+                                   options.class_inputs,
+                                   required_desugar_classes_set)
+  logging.debug('Extracted class files: %d', len(class_files))
+
+  # If the only change is deleting a file, class_files will be empty.
+  if class_files:
+    # Dex necessary classes into intermediate dex files.
+    dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file']
+    if options.desugar_dependencies and not options.skip_custom_d8:
+      dex_cmd += ['--file-tmp-prefix', tmp_extract_dir]
+    _RunD8(dex_cmd, class_files, options.incremental_dir,
+           options.warnings_as_errors,
+           options.show_desugar_default_interface_warnings)
+    logging.debug('Dexed class files.')
+
+
+def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
+  logging.debug('_OnStaleMd5')
+  with build_utils.TempDir() as tmp_dir:
+    if options.incremental_dir:
+      # Create directory for all intermediate dex files.
+      if not os.path.exists(options.incremental_dir):
+        os.makedirs(options.incremental_dir)
+
+      _DeleteStaleIncrementalDexFiles(options.incremental_dir, final_dex_inputs)
+      logging.debug('Stale files deleted')
+      _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd)
+
+    _CreateFinalDex(
+        final_dex_inputs, options.output, tmp_dir, dex_cmd, options=options)
+
+
+def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar,
+                                  min_api):
+  dex_cmd = build_utils.JavaCmd(verify=False) + [
+      '-cp',
+      r8_jar_path,
+      'com.android.tools.r8.D8',
+      '--min-api',
+      min_api,
+  ]
+  with build_utils.TempDir() as tmp_dir:
+    _CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, dex_cmd)
+
+
+def main(args):
+  build_utils.InitLogging('DEX_DEBUG')
+  options = _ParseArgs(args)
+
+  options.class_inputs += options.class_inputs_filearg
+  options.dex_inputs += options.dex_inputs_filearg
+
+  input_paths = options.class_inputs + options.dex_inputs
+  input_paths.append(options.r8_jar_path)
+  input_paths.append(options.custom_d8_jar_path)
+  if options.main_dex_rules_path:
+    input_paths.extend(options.main_dex_rules_path)
+
+  depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg
+
+  output_paths = [options.output]
+
+  track_subpaths_allowlist = []
+  if options.incremental_dir:
+    final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
+        options.class_inputs, options.incremental_dir)
+    output_paths += final_dex_inputs
+    track_subpaths_allowlist += options.class_inputs
+  else:
+    final_dex_inputs = list(options.class_inputs)
+  final_dex_inputs += options.dex_inputs
+
+  dex_cmd = build_utils.JavaCmd(options.warnings_as_errors)
+
+  if options.dump_inputs:
+    dex_cmd += ['-Dcom.android.tools.r8.dumpinputtofile=d8inputs.zip']
+
+  if not options.skip_custom_d8:
+    dex_cmd += [
+        '-cp',
+        '{}:{}'.format(options.r8_jar_path, options.custom_d8_jar_path),
+        'org.chromium.build.CustomD8',
+    ]
+  else:
+    dex_cmd += [
+        '-cp',
+        options.r8_jar_path,
+        'com.android.tools.r8.D8',
+    ]
+
+  if options.release:
+    dex_cmd += ['--release']
+  if options.min_api:
+    dex_cmd += ['--min-api', options.min_api]
+
+  if not options.desugar:
+    dex_cmd += ['--no-desugaring']
+  elif options.classpath:
+    # The classpath is used by D8 for interface desugaring.
+    if options.desugar_dependencies and not options.skip_custom_d8:
+      dex_cmd += ['--desugar-dependencies', options.desugar_dependencies]
+      if track_subpaths_allowlist:
+        track_subpaths_allowlist += options.classpath
+    depfile_deps += options.classpath
+    input_paths += options.classpath
+    # Still pass the entire classpath in case a new dependency is needed by
+    # desugar, so that desugar_dependencies will be updated for the next build.
+    for path in options.classpath:
+      dex_cmd += ['--classpath', path]
+
+  if options.classpath or options.main_dex_rules_path:
+    # --main-dex-rules requires bootclasspath.
+    dex_cmd += ['--lib', build_utils.JAVA_HOME]
+    for path in options.bootclasspath:
+      dex_cmd += ['--lib', path]
+    depfile_deps += options.bootclasspath
+    input_paths += options.bootclasspath
+
+  if options.desugar_jdk_libs_json:
+    dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
+  if options.force_enable_assertions:
+    dex_cmd += ['--force-enable-assertions']
+
+  # The changes feature from md5_check allows us to only re-dex the class files
+  # that have changed and the class files that need to be re-desugared by D8.
+  md5_check.CallAndWriteDepfileIfStale(
+      lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
+      options,
+      input_paths=input_paths,
+      input_strings=dex_cmd + [bool(options.incremental_dir)],
+      output_paths=output_paths,
+      pass_changes=True,
+      track_subpaths_allowlist=track_subpaths_allowlist,
+      depfile_deps=depfile_deps)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/dex.pydeps b/src/build/android/gyp/dex.pydeps
new file mode 100644
index 0000000..23856f3
--- /dev/null
+++ b/src/build/android/gyp/dex.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
+../../gn_helpers.py
+../../print_python_deps.py
+../convert_dex_profile.py
+dex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/zipalign.py
diff --git a/src/build/android/gyp/dex_jdk_libs.py b/src/build/android/gyp/dex_jdk_libs.py
new file mode 100755
index 0000000..6304779
--- /dev/null
+++ b/src/build/android/gyp/dex_jdk_libs.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument('--output', required=True, help='Dex output path.')
+  parser.add_argument('--r8-path', required=True, help='Path to R8 jar.')
+  parser.add_argument(
+      '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+  parser.add_argument(
+      '--desugar-jdk-libs-jar', help='Path to desugar_jdk_libs.jar.')
+  parser.add_argument('--desugar-jdk-libs-configuration-jar',
+                      help='Path to desugar_jdk_libs_configuration.jar.')
+  parser.add_argument('--min-api', help='minSdkVersion', required=True)
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  options = parser.parse_args(args)
+  return options
+
+
+def DexJdkLibJar(r8_path,
+                 min_api,
+                 desugar_jdk_libs_json,
+                 desugar_jdk_libs_jar,
+                 desugar_jdk_libs_configuration_jar,
+                 output,
+                 warnings_as_errors,
+                 config_paths=None):
+  # TODO(agrieve): Spews a lot of stderr about missing classes.
+  with build_utils.TempDir() as tmp_dir:
+    cmd = build_utils.JavaCmd(warnings_as_errors) + [
+        '-cp',
+        r8_path,
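+        # L8 is the R8 toolchain's compiler for the desugared JDK library.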
+        'com.android.tools.r8.L8',
+        '--min-api',
+        min_api,
+        '--lib',
+        build_utils.JAVA_HOME,
+        '--desugared-lib',
+        desugar_jdk_libs_json,
+    ]
+
+    # If no desugaring is required, no keep rules are generated, and the keep
+    # file will not be created.
+    if config_paths is not None:
+      for path in config_paths:
+        cmd += ['--pg-conf', path]
+
+    cmd += [
+        '--output', tmp_dir, desugar_jdk_libs_jar,
+        desugar_jdk_libs_configuration_jar
+    ]
+
+    build_utils.CheckOutput(cmd,
+                            print_stdout=True,
+                            fail_on_output=warnings_as_errors)
+    if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')):
+      raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!')
+
+    # classes.dex might not exist if the "desugar_jdk_libs_jar" is not used
+    # at all.
+    if os.path.exists(os.path.join(tmp_dir, 'classes.dex')):
+      shutil.move(os.path.join(tmp_dir, 'classes.dex'), output)
+      return True
+    return False
+
+
+def main(args):
+  options = _ParseArgs(args)
+  DexJdkLibJar(options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+               options.desugar_jdk_libs_jar,
+               options.desugar_jdk_libs_configuration_jar, options.output,
+               options.warnings_as_errors)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/dex_jdk_libs.pydeps b/src/build/android/gyp/dex_jdk_libs.pydeps
new file mode 100644
index 0000000..28d181f
--- /dev/null
+++ b/src/build/android/gyp/dex_jdk_libs.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex_jdk_libs.pydeps build/android/gyp/dex_jdk_libs.py
+../../gn_helpers.py
+dex_jdk_libs.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/dexsplitter.py b/src/build/android/gyp/dexsplitter.py
new file mode 100755
index 0000000..149e994
--- /dev/null
+++ b/src/build/android/gyp/dexsplitter.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParseOptions(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--depfile', help='Path to the depfile to write to.')
+  parser.add_argument('--stamp', help='Path to stamp to mark when finished.')
+  parser.add_argument('--r8-path', help='Path to the r8.jar to use.')
+  parser.add_argument(
+      '--input-dex-zip', help='Path to dex files in zip being split.')
+  parser.add_argument(
+      '--proguard-mapping-file', help='Path to proguard mapping file.')
+  parser.add_argument(
+      '--feature-name',
+      action='append',
+      dest='feature_names',
+      help='The name of the feature module.')
+  parser.add_argument(
+      '--feature-jars',
+      action='append',
+      help='GN list of paths to jars which comprise the corresponding feature.')
+  parser.add_argument(
+      '--dex-dest',
+      action='append',
+      dest='dex_dests',
+      help='Destination for dex file of the corresponding feature.')
+  options = parser.parse_args(args)
+
+  assert len(options.feature_names) == len(options.feature_jars) and len(
+      options.feature_names) == len(options.dex_dests)
+  options.features = {}
+  for i, name in enumerate(options.feature_names):
+    options.features[name] = build_utils.ParseGnList(options.feature_jars[i])
+
+  return options
+
+
+def _RunDexsplitter(options, output_dir):
+  cmd = build_utils.JavaCmd() + [
+      '-cp',
+      options.r8_path,
+      'com.android.tools.r8.dexsplitter.DexSplitter',
+      '--output',
+      output_dir,
+      '--proguard-map',
+      options.proguard_mapping_file,
+  ]
+
+  for base_jar in options.features['base']:
+    cmd += ['--base-jar', base_jar]
+
+  base_jars_lookup = set(options.features['base'])
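+  # Jars that also appear in the base module are kept in the base split.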
+  for feature in options.features:
+    if feature == 'base':
+      continue
+    for feature_jar in options.features[feature]:
+      if feature_jar not in base_jars_lookup:
+        cmd += ['--feature-jar', feature_jar + ':' + feature]
+
+  with build_utils.TempDir() as temp_dir:
+    unzipped_files = build_utils.ExtractAll(options.input_dex_zip, temp_dir)
+    for file_name in unzipped_files:
+      cmd += ['--input', file_name]
+    build_utils.CheckOutput(cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseOptions(args)
+
+  input_paths = [options.input_dex_zip]
+  for feature_jars in options.features.values():
+    for feature_jar in feature_jars:
+      input_paths.append(feature_jar)
+
+  with build_utils.TempDir() as dexsplitter_output_dir:
+    curr_location_to_dest = []
+    if len(options.features) == 1:
+      # Don't run dexsplitter since it needs at least one non-base feature.
+      curr_location_to_dest.append((options.input_dex_zip,
+                                    options.dex_dests[0]))
+    else:
+      _RunDexsplitter(options, dexsplitter_output_dir)
+
+      for i, dest in enumerate(options.dex_dests):
+        module_dex_file = os.path.join(dexsplitter_output_dir,
+                                       options.feature_names[i], 'classes.dex')
+        if os.path.exists(module_dex_file):
+          curr_location_to_dest.append((module_dex_file, dest))
+        else:
+          module_dex_file += '.jar'
+          assert os.path.exists(
+              module_dex_file), 'Dexsplitter tool output not found.'
+          curr_location_to_dest.append((module_dex_file, dest))
+
+    for curr_location, dest in curr_location_to_dest:
+      with build_utils.AtomicOutput(dest) as f:
+        if curr_location.endswith('.jar'):
+          if dest.endswith('.jar'):
+            shutil.copy(curr_location, f.name)
+          else:
+            with zipfile.ZipFile(curr_location, 'r') as z:
+              namelist = z.namelist()
+              assert len(namelist) == 1, (
+                  'Expected exactly one dex file in ' +
+                  options.input_dex_zip)
+              z.extract(namelist[0], f.name)
+        else:
+          if dest.endswith('.jar'):
+            build_utils.ZipDir(
+                f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
+          else:
+            shutil.move(curr_location, f.name)
+
+  build_utils.Touch(options.stamp)
+  build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/dexsplitter.pydeps b/src/build/android/gyp/dexsplitter.pydeps
new file mode 100644
index 0000000..cefc572
--- /dev/null
+++ b/src/build/android/gyp/dexsplitter.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dexsplitter.pydeps build/android/gyp/dexsplitter.py
+../../gn_helpers.py
+dexsplitter.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/dist_aar.py b/src/build/android/gyp/dist_aar.py
new file mode 100755
index 0000000..7f0de1d
--- /dev/null
+++ b/src/build/android/gyp/dist_aar.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an Android .aar file."""
+
+import argparse
+import os
+import posixpath
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import filter_zip
+from util import build_utils
+
+
+_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
+
+
+def _MergeRTxt(r_paths, include_globs):
+  """Merging the given R.txt files and returns them as a string."""
+  all_lines = set()
+  for r_path in r_paths:
+    if include_globs and not build_utils.MatchesGlob(r_path, include_globs):
+      continue
+    with open(r_path) as f:
+      all_lines.update(f.readlines())
+  return ''.join(sorted(all_lines))
+
+
+def _MergeProguardConfigs(proguard_configs):
+  """Merging the given proguard config files and returns them as a string."""
+  ret = []
+  for config in proguard_configs:
+    ret.append('# FROM: {}'.format(config))
+    with open(config) as f:
+      ret.append(f.read())
+  return '\n'.join(ret)
+
+
+def _AddResources(aar_zip, resource_zips, include_globs):
+  """Adds all resource zips to the given aar_zip.
+
+  Ensures all res/values/* files have unique names by suffixing them with the
+  source zip's index.
+  """
+  for i, path in enumerate(resource_zips):
+    if include_globs and not build_utils.MatchesGlob(path, include_globs):
+      continue
+    with zipfile.ZipFile(path) as res_zip:
+      for info in res_zip.infolist():
+        data = res_zip.read(info)
+        dirname, basename = posixpath.split(info.filename)
+        if 'values' in dirname:
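+          # E.g. values/strings.xml from zip i -> res/values/strings_<i>.xml.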
+          root, ext = os.path.splitext(basename)
+          basename = '{}_{}{}'.format(root, i, ext)
+          info.filename = posixpath.join(dirname, basename)
+        info.filename = posixpath.join('res', info.filename)
+        aar_zip.writestr(info, data)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--output', required=True, help='Path to output aar.')
+  parser.add_argument('--jars', required=True, help='GN list of jar inputs.')
+  parser.add_argument('--dependencies-res-zips', required=True,
+                      help='GN list of resource zips.')
+  parser.add_argument('--r-text-files', required=True,
+                      help='GN list of R.txt files to merge')
+  parser.add_argument('--proguard-configs', required=True,
+                      help='GN list of ProGuard flag files to merge.')
+  parser.add_argument(
+      '--android-manifest',
+      help='Path to AndroidManifest.xml to include.',
+      default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml'))
+  parser.add_argument('--native-libraries', default='',
+                      help='GN list of native libraries. If non-empty then '
+                      'ABI must be specified.')
+  parser.add_argument('--abi',
+                      help='ABI (e.g. armeabi-v7a) for native libraries.')
+  parser.add_argument(
+      '--jar-excluded-globs',
+      help='GN list of globs for paths to exclude in jar.')
+  parser.add_argument(
+      '--jar-included-globs',
+      help='GN list of globs for paths to include in jar.')
+  parser.add_argument(
+      '--resource-included-globs',
+      help='GN list of globs for paths to include in R.txt and resources zips.')
+
+  options = parser.parse_args(args)
+
+  if options.native_libraries and not options.abi:
+    parser.error('You must provide --abi if you have native libs')
+
+  options.jars = build_utils.ParseGnList(options.jars)
+  options.dependencies_res_zips = build_utils.ParseGnList(
+      options.dependencies_res_zips)
+  options.r_text_files = build_utils.ParseGnList(options.r_text_files)
+  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+  options.native_libraries = build_utils.ParseGnList(options.native_libraries)
+  options.jar_excluded_globs = build_utils.ParseGnList(
+      options.jar_excluded_globs)
+  options.jar_included_globs = build_utils.ParseGnList(
+      options.jar_included_globs)
+  options.resource_included_globs = build_utils.ParseGnList(
+      options.resource_included_globs)
+
+  with tempfile.NamedTemporaryFile(delete=False) as staging_file:
+    try:
+      with zipfile.ZipFile(staging_file.name, 'w') as z:
+        build_utils.AddToZipHermetic(
+            z, 'AndroidManifest.xml', src_path=options.android_manifest)
+
+        path_transform = filter_zip.CreatePathTransform(
+            options.jar_excluded_globs, options.jar_included_globs, [])
+        with tempfile.NamedTemporaryFile() as jar_file:
+          build_utils.MergeZips(
+              jar_file.name, options.jars, path_transform=path_transform)
+          build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)
+
+        build_utils.AddToZipHermetic(
+            z,
+            'R.txt',
+            data=_MergeRTxt(options.r_text_files,
+                            options.resource_included_globs))
+        build_utils.AddToZipHermetic(z, 'public.txt', data='')
+
+        if options.proguard_configs:
+          build_utils.AddToZipHermetic(
+              z, 'proguard.txt',
+              data=_MergeProguardConfigs(options.proguard_configs))
+
+        _AddResources(z, options.dependencies_res_zips,
+                      options.resource_included_globs)
+
+        for native_library in options.native_libraries:
+          libname = os.path.basename(native_library)
+          build_utils.AddToZipHermetic(
+              z, os.path.join('jni', options.abi, libname),
+              src_path=native_library)
+    except:
+      os.unlink(staging_file.name)
+      raise
+    shutil.move(staging_file.name, options.output)
+
+  if options.depfile:
+    all_inputs = (options.jars + options.dependencies_res_zips +
+                  options.r_text_files + options.proguard_configs)
+    build_utils.WriteDepfile(options.depfile, options.output, all_inputs)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
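For reference, a minimal sketch (not part of the import) of how the layout produced by dist_aar.py can be inspected; 'example.aar' is a hypothetical path:

    import zipfile

    # List the entries of an .aar produced by dist_aar.py.
    with zipfile.ZipFile('example.aar') as aar:
        for name in aar.namelist():
            print(name)
    # Per the script above, this prints AndroidManifest.xml, classes.jar,
    # R.txt, public.txt, optionally proguard.txt, then res/... entries and
    # jni/<abi>/*.so entries.
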
diff --git a/src/build/android/gyp/dist_aar.pydeps b/src/build/android/gyp/dist_aar.pydeps
new file mode 100644
index 0000000..3182580
--- /dev/null
+++ b/src/build/android/gyp/dist_aar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
+../../gn_helpers.py
+dist_aar.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/extract_unwind_tables.py b/src/build/android/gyp/extract_unwind_tables.py
new file mode 100755
index 0000000..25c3130
--- /dev/null
+++ b/src/build/android/gyp/extract_unwind_tables.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts the unwind tables in from breakpad symbol files
+
+Runs dump_syms on the given binary file and extracts the CFI data into the
+given output file.
+The output file is a binary file containing CFI rows ordered based on function
+address. The output file only contains rows that match the most popular rule
+type in CFI table, to reduce the output size and specify data in compact format.
+See doc https://github.com/google/breakpad/blob/master/docs/symbol_files.md.
+1. The CFA rules should be of postfix form "SP <val> +".
+2. The RA rules should be of postfix form "CFA <val> + ^".
+Note: breakpad represents dereferencing address with '^' operator.
+
+The output file has 2 tables UNW_INDEX and UNW_DATA, inspired from ARM EHABI
+format. The first table contains function addresses and an index into the
+UNW_DATA table. The second table contains one or more rows for the function
+unwind information.
+
+The output file starts with 4 bytes counting the number of entries in UNW_INDEX.
+Then UNW_INDEX table and UNW_DATA table.
+
+UNW_INDEX contains two columns of N rows each, where N is the number of
+functions.
+  1. The first column contains 4-byte rows holding each function's start
+     address as an offset from the start of the binary, in sorted order.
+  2. For each function address, the second column contains a 2-byte index.
+     The indices are offsets (in units of 2 bytes) of the CFI data from the
+     start of UNW_DATA.
+The last entry in the table always contains the CANT_UNWIND index, to mark
+the end address of the last function.
+
+UNW_DATA contains the data of all the functions. Each function's data
+contains N rows. The data found at the address pointed to by UNW_INDEX is:
+  2 bytes: N - the number of rows that belong to the current function.
+  N * 4 bytes: N rows of data. 16 bits : Address offset from function start.
+                               14 bits : CFA offset / 4.
+                                2 bits : RA offset / 4.
+
+A function is not added to the unwind table under the following conditions:
+C1. If the length of the function code (number of instructions) is greater
+    than 0xFFFF (2-byte address span). This is because we use 16 bits to
+    refer to the offset of an instruction from the start of the function.
+C2. If the function moves the SP by more than 0xFFFF bytes. This is because
+    we use 14 bits to denote the CFA offset (the last 2 bits are 0).
+C3. If the Return Address is stored at an offset >= 16 from the CFA. Some
+    functions which have variable arguments can have an offset of up to 16.
+    TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4
+    since we never have 0.
+C4. Some functions do not have unwind information defined in the DWARF info.
+    These functions have the index value CANT_UNWIND (0xFFFF) in the
+    UNW_INDEX table.
+
+
+Usage:
+  extract_unwind_tables.py --input_path [root path to unstripped chrome.so]
+      --output_path [output path] --dump_syms_path [path to dump_syms binary]
+"""
+
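+# For example (using the test data in extract_unwind_tables_tests.py), the
+# CFI row "STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^" of the function
+# starting at e1a96e is encoded as the pair of 2-byte values (0x4, 0xe): an
+# address offset of 4, and 12 | (8 // 4) = 0xe for the packed CFA/RA value.
+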
+import argparse
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+_CFA_REG = '.cfa'
+_RA_REG = '.ra'
+
+_ADDR_ENTRY = 0
+_LENGTH_ENTRY = 1
+
+_CANT_UNWIND = 0xFFFF
+
+
+def _Write4Bytes(output_file, val):
+  """Writes a 32 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<L', val))
+
+
+def _Write2Bytes(output_file, val):
+  """Writes a 16 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<H', val))
+
+
+def _FindRuleForRegister(cfi_row, reg):
+  """Returns the postfix expression as string for a given register.
+
+  Breakpad CFI row format specifies rules for unwinding each register in postfix
+  expression form separated by space. Each rule starts with register name and a
+  colon. Eg: "CFI R1: <rule> R2: <rule>".
+  """
+  out = []
+  found_register = False
+  for part in cfi_row:
+    if found_register:
+      if part[-1] == ':':
+        break
+      out.append(part)
+    elif part == reg + ':':
+      found_register = True
+  return ' '.join(out)
+
+
+def _GetCfaAndRaOffset(cfi_row):
+  """Returns a tuple with 2 numbers (cfa_offset, ra_offset).
+
+  Returns the parsed values if the rule matches the predefined criteria, and
+  (0, 0) otherwise. The criterion for the CFA rule is the postfix form
+  "SP <val> +", and for the RA rule the postfix form "CFA -<val> + ^".
+  """
+  cfa_offset = 0
+  ra_offset = 0
+  cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG)
+  ra_rule = _FindRuleForRegister(cfi_row, _RA_REG)
+  if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule):
+    cfa_offset = int(cfa_rule.split()[1], 10)
+  if ra_rule:
+    if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule):
+      return (0, 0)
+    ra_offset = -1 * int(ra_rule.split()[1], 10)
+  return (cfa_offset, ra_offset)
+
+
+def _GetAllCfiRows(symbol_file):
+  """Returns parsed CFI data from given symbol_file.
+
+  Each entry in the cfi data dictionary returned is a map from function start
+  address to array of function rows, starting with FUNCTION type, followed by
+  one or more CFI rows.
+  """
+  cfi_data = {}
+  current_func = []
+  for line in symbol_file:
+    line = line.decode('utf8')
+    if 'STACK CFI' not in line:
+      continue
+
+    parts = line.split()
+    data = {}
+    if parts[2] == 'INIT':
+      # Add the previous function to the output
+      if len(current_func) > 1:
+        cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+      current_func = []
+
+      # The function line is of format "STACK CFI INIT <addr> <length> ..."
+      data[_ADDR_ENTRY] = int(parts[3], 16)
+      data[_LENGTH_ENTRY] = int(parts[4], 16)
+
+      # Condition C1: Skip if length is large.
+      if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff:
+        continue  # Skip the current function.
+    else:
+      # The current function is skipped.
+      if len(current_func) == 0:
+        continue
+
+      # The CFI row is of format "STACK CFI <addr> .cfa: <expr> .ra: <expr> ..."
+      data[_ADDR_ENTRY] = int(parts[2], 16)
+      (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts)
+
+      # Condition C2 and C3: Skip based on limits on offsets.
+      if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff:
+        current_func = []
+        continue
+      assert data[_CFA_REG] % 4 == 0
+      # Since we skipped functions with code size larger than 0xffff, we should
+      # have no function offset larger than the same value.
+      assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff
+
+    if data[_ADDR_ENTRY] == 0:
+      # Skip current function, delete all previous entries.
+      current_func = []
+      continue
+    assert data[_ADDR_ENTRY] % 2 == 0
+    current_func.append(data)
+
+  # Condition C4: Skip function without CFI rows.
+  if len(current_func) > 1:
+    cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+  return cfi_data
+
+
+def _WriteCfiData(cfi_data, out_file):
+  """Writes the CFI data in defined format to out_file."""
+  # Stores the final data that will be written to UNW_DATA table, in order
+  # with 2 byte items.
+  unw_data = []
+
+  # Represent all the CFI data of a function as a set of numbers and map them
+  # to an index in |unw_data|. This index is later written to the UNW_INDEX
+  # table for each function. This map is used to find the index of the data
+  # for functions.
+  data_to_index = {}
+  # Store mapping between the functions to the index.
+  func_addr_to_index = {}
+  previous_func_end = 0
+  for addr, function in sorted(cfi_data.items()):
+    # Add an empty function entry when function CFIs are missing between 2
+    # functions.
+    if previous_func_end != 0 and addr - previous_func_end > 4:
+      func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+    previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY]
+
+    assert len(function) > 1
+    func_data_arr = []
+    func_data = 0
+    # The first row contains the function address and length. The rest of the
+    # rows have CFI data. Create function data array as given in the format.
+    for row in function[1:]:
+      addr_offset = row[_ADDR_ENTRY] - addr
+      cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] // 4)
+
+      func_data_arr.append(addr_offset)
+      func_data_arr.append(cfa_offset)
+
+    # Consider all the rows in the data as one large integer and add it as a
+    # key to |data_to_index|.
+    for data in func_data_arr:
+      func_data = (func_data << 16) | data
+
+    row_count = len(func_data_arr) // 2
+    if func_data not in data_to_index:
+      # When data is not found, create a new index = len(unw_data), and write
+      # the data to |unw_data|.
+      index = len(unw_data)
+      data_to_index[func_data] = index
+      unw_data.append(row_count)
+      for row in func_data_arr:
+        unw_data.append(row)
+    else:
+      # If the data was found, then use the same index for the function.
+      index = data_to_index[func_data]
+      assert row_count == unw_data[index]
+    func_addr_to_index[addr] = data_to_index[func_data]
+
+  # Mark the end of the last function entry.
+  func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+
+  # Write the number of entries in the UNW_INDEX table.
+  _Write4Bytes(out_file, len(func_addr_to_index))
+
+  # Write the UNW_INDEX table: first the list of addresses, then the indices.
+  sorted_unw_index = sorted(func_addr_to_index.items())
+  for addr, index in sorted_unw_index:
+    _Write4Bytes(out_file, addr)
+  for addr, index in sorted_unw_index:
+    _Write2Bytes(out_file, index)
+
+  # Write the UNW_DATA table.
+  for data in unw_data:
+    _Write2Bytes(out_file, data)
+
+
+def _ParseCfiData(sym_stream, output_path):
+  cfi_data = _GetAllCfiRows(sym_stream)
+  with open(output_path, 'wb') as out_file:
+    _WriteCfiData(cfi_data, out_file)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--input_path', required=True,
+      help='The input path of the unstripped binary')
+  parser.add_argument(
+      '--output_path', required=True,
+      help='The path of the output file')
+  parser.add_argument(
+      '--dump_syms_path', required=True,
+      help='The path of the dump_syms binary')
+
+  args = parser.parse_args()
+  cmd = ['./' + args.dump_syms_path, args.input_path]
+  proc = subprocess.Popen(cmd, bufsize=-1, stdout=subprocess.PIPE)
+  _ParseCfiData(proc.stdout, args.output_path)
+  assert proc.wait() == 0
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
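The tables can be read back with a short sketch that follows the layout documented in the script (assuming a file produced by extract_unwind_tables.py):

    import struct

    def read_unwind_tables(path):
        # Layout: a 4-byte entry count, then UNW_INDEX (N 4-byte addresses
        # followed by N 2-byte indices), then UNW_DATA as 2-byte items.
        with open(path, 'rb') as f:
            (count,) = struct.unpack('<L', f.read(4))
            addrs = struct.unpack('<%dL' % count, f.read(4 * count))
            indices = struct.unpack('<%dH' % count, f.read(2 * count))
            unw_data = f.read()
        return addrs, indices, unw_data
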
diff --git a/src/build/android/gyp/extract_unwind_tables_tests.py b/src/build/android/gyp/extract_unwind_tables_tests.py
new file mode 100755
index 0000000..59436ff
--- /dev/null
+++ b/src/build/android/gyp/extract_unwind_tables_tests.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for extract_unwind_tables.py
+
+This test suite contains various tests for extracting CFI tables from breakpad
+symbol files.
+"""
+
+import os
+import struct
+import sys
+import tempfile
+import unittest
+
+import extract_unwind_tables
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+
+class TestExtractUnwindTables(unittest.TestCase):
+  def testExtractCfi(self):
+    with tempfile.NamedTemporaryFile() as output_file:
+      test_data_lines = """
+MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so
+INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642
+FILE 0 ../../base/allocator/allocator_check.cc
+FILE 1 ../../base/allocator/allocator_extension.cc
+FILE 2 ../../base/allocator/allocator_shim.cc
+FUNC 1adcb60 54 0 i2d_name_canon
+1adcb60 1a 509 17054
+3b94c70 2 69 40
+PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize()
+PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File)
+STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI 2 .cfa: sp 4 +
+STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI 6 .cfa: sp 16 +
+STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr
+STACK CFI e1a970 .cfa: sp 4 +
+STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI e1a974 .cfa: sp 16 +
+STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr
+STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 +
+STACK CFI e1a1e8 .cfa: sp 80 +
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^
+STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr
+STACK CFI e17004 2 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e74 .cfa: sp 8 + .ra: .cfa -4 + ^ r4: .cfa -8 + ^
+STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4
+STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^
+STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^
+STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr
+STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^
+""".splitlines()
+      extract_unwind_tables._ParseCfiData(
+          [l.encode('utf8') for l in test_data_lines], output_file.name)
+
+      expected_cfi_data = {
+        0xe1a1e4 : [0x2, 0x11, 0x4, 0x50],
+        0xe1a296 : [],
+        0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10],
+        0xe1a990 : [],
+        0x3b92e24: [0x28, 0x13],
+        0x3b92e62: [],
+      }
+      expected_function_count = len(expected_cfi_data)
+
+      actual_output = []
+      with open(output_file.name, 'rb') as f:
+        while True:
+          read = f.read(2)
+          if not read:
+            break
+          actual_output.append(struct.unpack('H', read)[0])
+
+      # The first value is the number of entries in the unw_index table.
+      unw_index_size = actual_output[1] << 16 | actual_output[0]
+      # |unw_index_size| should match the entry count.
+      self.assertEqual(expected_function_count, unw_index_size)
+      # |actual_output| is in blocks of 2 bytes. Skip the first 4 bytes, which
+      # hold the entry count.
+      unw_index_start = 2
+      unw_index_addr_end = unw_index_start + expected_function_count * 2
+      unw_index_end = unw_index_addr_end + expected_function_count
+      unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end]
+      unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end]
+
+      unw_data_start = unw_index_end
+      unw_data = actual_output[unw_data_start:]
+
+      for func_iter in range(0, expected_function_count):
+        func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 |
+                     unw_index_addr_col[func_iter * 2])
+        index = unw_index_index_col[func_iter]
+        # If the index is CANT_UNWIND, the function cannot be unwound.
+        if index == 0xFFFF:
+          self.assertEqual(expected_cfi_data[func_addr], [])
+          continue
+
+        func_start = index + 1
+        func_end = func_start + unw_data[index] * 2
+        self.assertEqual(len(expected_cfi_data[func_addr]),
+                         func_end - func_start)
+        func_cfi = unw_data[func_start : func_end]
+        self.assertEqual(expected_cfi_data[func_addr], func_cfi)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/filter_zip.py b/src/build/android/gyp/filter_zip.py
new file mode 100755
index 0000000..068ff03
--- /dev/null
+++ b/src/build/android/gyp/filter_zip.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import shutil
+import sys
+
+from util import build_utils
+
+
+_RESOURCE_CLASSES = [
+    "R.class",
+    "R##*.class",
+    "Manifest.class",
+    "Manifest##*.class",
+]
+
+
+def CreatePathTransform(exclude_globs, include_globs,
+                        strip_resource_classes_for):
+  """Returns a function to strip paths for the given patterns.
+
+  Args:
+    exclude_globs: List of globs that if matched should be excluded.
+    include_globs: List of globs that if not matched should be excluded.
+    strip_resource_classes_for: List of Java packages for which to strip
+       R.java classes from.
+
+  Returns:
+    * None if no filters are needed.
+    * A function "(path) -> path" that returns None when |path| should be
+          stripped, or |path| otherwise.
+  """
+  if not (exclude_globs or include_globs or strip_resource_classes_for):
+    return None
+  exclude_globs = list(exclude_globs or [])
+  if strip_resource_classes_for:
+    exclude_globs.extend(p.replace('.', '/') + '/' + f
+                         for p in strip_resource_classes_for
+                         for f in _RESOURCE_CLASSES)
+  def path_transform(path):
+    # Exclude filters take precedence over include filters.
+    if build_utils.MatchesGlob(path, exclude_globs):
+      return None
+    if include_globs and not build_utils.MatchesGlob(path, include_globs):
+      return None
+    return path
+
+  return path_transform
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--input', required=True,
+      help='Input zip file.')
+  parser.add_argument('--output', required=True,
+      help='Output zip file.')
+  parser.add_argument('--exclude-globs',
+      help='GN list of exclude globs')
+  parser.add_argument('--include-globs',
+      help='GN list of include globs')
+  parser.add_argument('--strip-resource-classes-for',
+      help='GN list of Java package names to exclude R.class files in.')
+
+  argv = build_utils.ExpandFileArgs(sys.argv[1:])
+  args = parser.parse_args(argv)
+
+  args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
+  args.include_globs = build_utils.ParseGnList(args.include_globs)
+  args.strip_resource_classes_for = build_utils.ParseGnList(
+      args.strip_resource_classes_for)
+
+  path_transform = CreatePathTransform(args.exclude_globs, args.include_globs,
+                                       args.strip_resource_classes_for)
+  with build_utils.AtomicOutput(args.output) as f:
+    if path_transform:
+      build_utils.MergeZips(f.name, [args.input], path_transform=path_transform)
+    else:
+      shutil.copy(args.input, f.name)
+
+
+if __name__ == '__main__':
+  main()
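A small usage sketch of CreatePathTransform (the globs are hypothetical; run from build/android/gyp so that filter_zip imports):

    import filter_zip

    transform = filter_zip.CreatePathTransform(
        exclude_globs=['*/R.class'], include_globs=[],
        strip_resource_classes_for=[])
    print(transform('org/example/R.class'))     # None: excluded.
    print(transform('org/example/Main.class'))  # Path returned: kept.
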
diff --git a/src/build/android/gyp/filter_zip.pydeps b/src/build/android/gyp/filter_zip.pydeps
new file mode 100644
index 0000000..f561e05
--- /dev/null
+++ b/src/build/android/gyp/filter_zip.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py
+../../gn_helpers.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/finalize_apk.py b/src/build/android/gyp/finalize_apk.py
new file mode 100644
index 0000000..b465f71
--- /dev/null
+++ b/src/build/android/gyp/finalize_apk.py
@@ -0,0 +1,78 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and aligns an APK."""
+
+import argparse
+import logging
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from util import build_utils
+
+
+def FinalizeApk(apksigner_path,
+                zipalign_path,
+                unsigned_apk_path,
+                final_apk_path,
+                key_path,
+                key_passwd,
+                key_name,
+                min_sdk_version,
+                warnings_as_errors=False):
+  # Use a tempfile so that Ctrl-C does not leave the file with a fresh mtime
+  # and a corrupted state.
+  with tempfile.NamedTemporaryFile() as staging_file:
+    if zipalign_path:
+      # v2 signing requires that zipalign happen first.
+      logging.debug('Running zipalign')
+      zipalign_cmd = [
+          zipalign_path, '-p', '-f', '4', unsigned_apk_path, staging_file.name
+      ]
+      build_utils.CheckOutput(zipalign_cmd,
+                              print_stdout=True,
+                              fail_on_output=warnings_as_errors)
+      signer_input_path = staging_file.name
+    else:
+      signer_input_path = unsigned_apk_path
+
+    sign_cmd = build_utils.JavaCmd(warnings_as_errors) + [
+        '-jar',
+        apksigner_path,
+        'sign',
+        '--in',
+        signer_input_path,
+        '--out',
+        staging_file.name,
+        '--ks',
+        key_path,
+        '--ks-key-alias',
+        key_name,
+        '--ks-pass',
+        'pass:' + key_passwd,
+    ]
+    # V3 signing adds security niceties, which are irrelevant for local builds.
+    sign_cmd += ['--v3-signing-enabled', 'false']
+
+    if min_sdk_version >= 24:
+      # Disable v1 signatures when v2 signing can be used (it's much faster).
+      # By default, both v1 and v2 signing happen.
+      sign_cmd += ['--v1-signing-enabled', 'false']
+    else:
+      # Force SHA-1 (makes signing faster; insecure is fine for local builds).
+      # Leave v2 signing enabled since it verifies faster on device when
+      # supported.
+      sign_cmd += ['--min-sdk-version', '1']
+
+    logging.debug('Signing apk')
+    build_utils.CheckOutput(sign_cmd,
+                            print_stdout=True,
+                            fail_on_output=warnings_as_errors)
+    shutil.move(staging_file.name, final_apk_path)
+    # TODO(crbug.com/1174969): Remove this once Python2 is obsoleted.
+    if sys.version_info.major == 2:
+      staging_file.delete = False
+    else:
+      staging_file._closer.delete = False
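The v1/v2/v3 flag selection above can be summarized in a standalone sketch (pure Python; apksigner itself is not invoked):

    def extra_sign_flags(min_sdk_version):
        # Local builds never need v3 signing.
        flags = ['--v3-signing-enabled', 'false']
        if min_sdk_version >= 24:
            # v2 signing alone suffices and is much faster.
            flags += ['--v1-signing-enabled', 'false']
        else:
            # Keep v1 + v2, forcing SHA-1 for faster signing.
            flags += ['--min-sdk-version', '1']
        return flags

    print(extra_sign_flags(21))  # v1 + v2 with SHA-1.
    print(extra_sign_flags(24))  # v2 only.
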
diff --git a/src/build/android/gyp/find.py b/src/build/android/gyp/find.py
new file mode 100755
index 0000000..b05874b
--- /dev/null
+++ b/src/build/android/gyp/find.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+from __future__ import print_function
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--pattern', default='*', help='File pattern to match.')
+  options, directories = parser.parse_args(argv)
+
+  for d in directories:
+    if not os.path.exists(d):
+      print('%s does not exist' % d, file=sys.stderr)
+      return 1
+    for root, _, filenames in os.walk(d):
+      for f in fnmatch.filter(filenames, options.pattern):
+        print(os.path.join(root, f))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/gcc_preprocess.py b/src/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000..70ae10f
--- /dev/null
+++ b/src/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import posixpath
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParsePackageName(data):
+  m = re.match(r'^\s*package\s+(.*?)\s*;', data, re.MULTILINE)
+  return m.group(1) if m else ''
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--include-dirs', help='GN list of include directories.')
+  parser.add_argument('--output', help='Path for .srcjar.')
+  parser.add_argument('--define',
+                      action='append',
+                      dest='defines',
+                      help='List of -D args')
+  parser.add_argument('templates', nargs='+', help='Template files.')
+  options = parser.parse_args(args)
+
+  options.defines = build_utils.ParseGnList(options.defines)
+  options.include_dirs = build_utils.ParseGnList(options.include_dirs)
+
+  gcc_cmd = [
+      'gcc',
+      '-E',  # stop after preprocessing.
+      '-DANDROID',  # Specify ANDROID define for pre-processor.
+      '-x',
+      'c-header',  # treat sources as C header files
+      '-P',  # disable line markers, e.g. '#line 309'
+  ]
+  gcc_cmd.extend('-D' + x for x in options.defines)
+  gcc_cmd.extend('-I' + x for x in options.include_dirs)
+
+  with build_utils.AtomicOutput(options.output) as f:
+    with zipfile.ZipFile(f, 'w') as z:
+      for template in options.templates:
+        data = build_utils.CheckOutput(gcc_cmd + [template])
+        package_name = _ParsePackageName(data)
+        if not package_name:
+          raise Exception('Could not find java package of ' + template)
+        zip_path = posixpath.join(
+            package_name.replace('.', '/'),
+            os.path.splitext(os.path.basename(template))[0]) + '.java'
+        build_utils.AddToZipHermetic(z, zip_path, data=data)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
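A short sketch of the package-to-path mapping used when writing the srcjar; the template name and contents here are hypothetical:

    import os
    import posixpath
    import re

    def zip_path_for(template, preprocessed):
        # Same regex as _ParsePackageName above.
        m = re.match(r'^\s*package\s+(.*?)\s*;', preprocessed, re.MULTILINE)
        return posixpath.join(
            m.group(1).replace('.', '/'),
            os.path.splitext(os.path.basename(template))[0]) + '.java'

    print(zip_path_for('Foo.template', 'package org.example;\nclass Foo {}'))
    # -> org/example/Foo.java
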
diff --git a/src/build/android/gyp/gcc_preprocess.pydeps b/src/build/android/gyp/gcc_preprocess.pydeps
new file mode 100644
index 0000000..39e56f7
--- /dev/null
+++ b/src/build/android/gyp/gcc_preprocess.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py
+../../gn_helpers.py
+gcc_preprocess.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/generate_android_wrapper.py b/src/build/android/gyp/generate_android_wrapper.py
new file mode 100755
index 0000000..c8b762c
--- /dev/null
+++ b/src/build/android/gyp/generate_android_wrapper.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+from util import build_utils
+
+sys.path.append(
+    os.path.abspath(
+        os.path.join(os.path.dirname(__file__), '..', '..', 'util')))
+
+import generate_wrapper
+
+_WRAPPED_PATH_LIST_RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)')
+
+
+def ExpandWrappedPathLists(args):
+  expanded_args = []
+  for arg in args:
+    m = _WRAPPED_PATH_LIST_RE.match(arg)
+    if m:
+      for p in build_utils.ParseGnList(m.group(2)):
+        expanded_args.extend([m.group(1), '@WrappedPath(%s)' % p])
+    else:
+      expanded_args.append(arg)
+  return expanded_args
+
+
+def main(raw_args):
+  parser = generate_wrapper.CreateArgumentParser()
+  expanded_raw_args = build_utils.ExpandFileArgs(raw_args)
+  expanded_raw_args = ExpandWrappedPathLists(expanded_raw_args)
+  args = parser.parse_args(expanded_raw_args)
+  return generate_wrapper.Wrap(args)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
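For illustration, a @WrappedPathList argument expands into repeated flag/@WrappedPath pairs; the flag name and paths below are hypothetical:

    import re

    _RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)')

    m = _RE.match('@WrappedPathList(--extra-libs, ["a/b.so", "c/d.so"])')
    # build_utils.ParseGnList(m.group(2)) would yield ['a/b.so', 'c/d.so'],
    # so ExpandWrappedPathLists() rewrites the single argument to:
    #   ['--extra-libs', '@WrappedPath(a/b.so)',
    #    '--extra-libs', '@WrappedPath(c/d.so)']
    print(m.group(1), m.group(2))
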
diff --git a/src/build/android/gyp/generate_linker_version_script.py b/src/build/android/gyp/generate_linker_version_script.py
new file mode 100755
index 0000000..995fcd7
--- /dev/null
+++ b/src/build/android/gyp/generate_linker_version_script.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generate linker version scripts for Chrome on Android shared libraries."""
+
+import argparse
+import os
+
+from util import build_utils
+
+_SCRIPT_HEADER = """\
+# AUTO-GENERATED FILE.  DO NOT MODIFY.
+#
+# See: %s
+
+{
+  global:
+""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+_SCRIPT_FOOTER = """\
+  local:
+    *;
+};
+"""
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--output',
+      required=True,
+      help='Path to output linker version script file.')
+  parser.add_argument(
+      '--export-java-symbols',
+      action='store_true',
+      help='Export Java_* JNI methods')
+  parser.add_argument(
+      '--export-symbol-allowlist-file',
+      action='append',
+      default=[],
+      dest='allowlists',
+      help='Path to an input file containing an allowlist of extra symbols to '
+      'export, one symbol per line. Multiple files may be specified.')
+  parser.add_argument(
+      '--export-feature-registrations',
+      action='store_true',
+      help='Export JNI_OnLoad_* methods')
+  options = parser.parse_args()
+
+  # JNI_OnLoad is always exported.
+  # CrashpadHandlerMain() is the entry point to the Crashpad handler, required
+  # for libcrashpad_handler_trampoline.so.
+  symbol_list = ['CrashpadHandlerMain', 'JNI_OnLoad']
+
+  if options.export_java_symbols:
+    symbol_list.append('Java_*')
+
+  if options.export_feature_registrations:
+    symbol_list.append('JNI_OnLoad_*')
+
+  for allowlist in options.allowlists:
+    with open(allowlist, 'rt') as f:
+      for line in f:
+        line = line.strip()
+        if not line or line[0] == '#':
+          continue
+        symbol_list.append(line)
+
+  script_content = [_SCRIPT_HEADER]
+  for symbol in symbol_list:
+    script_content.append('    %s;\n' % symbol)
+  script_content.append(_SCRIPT_FOOTER)
+
+  script = ''.join(script_content)
+
+  with build_utils.AtomicOutput(options.output, mode='w') as f:
+    f.write(script)
+
+
+if __name__ == '__main__':
+  main()
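For example, running the script with --export-java-symbols and no allowlist files yields a version script of the following shape (header comment omitted):

    {
      global:
        CrashpadHandlerMain;
        JNI_OnLoad;
        Java_*;
      local:
        *;
    };
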
diff --git a/src/build/android/gyp/generate_linker_version_script.pydeps b/src/build/android/gyp/generate_linker_version_script.pydeps
new file mode 100644
index 0000000..de9fa56
--- /dev/null
+++ b/src/build/android/gyp/generate_linker_version_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py
+../../gn_helpers.py
+generate_linker_version_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/ijar.py b/src/build/android/gyp/ijar.py
new file mode 100755
index 0000000..45413f6
--- /dev/null
+++ b/src/build/android/gyp/ijar.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+# python -c "import zipfile; zipfile.ZipFile('test.jar', 'w')"
+# du -b test.jar
+_EMPTY_JAR_SIZE = 22
+
+
+def main():
+  # The point of this wrapper is to use AtomicOutput so that output timestamps
+  # are not updated when outputs are unchanged.
+  ijar_bin, in_jar, out_jar = sys.argv[1:]
+  with build_utils.AtomicOutput(out_jar) as f:
+    # ijar fails on empty jars: https://github.com/bazelbuild/bazel/issues/10162
+    if os.path.getsize(in_jar) <= _EMPTY_JAR_SIZE:
+      with open(in_jar, 'rb') as in_f:
+        f.write(in_f.read())
+    else:
+      build_utils.CheckOutput([ijar_bin, in_jar, f.name])
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/ijar.pydeps b/src/build/android/gyp/ijar.pydeps
new file mode 100644
index 0000000..e9ecb66
--- /dev/null
+++ b/src/build/android/gyp/ijar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py
+../../gn_helpers.py
+ijar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/jacoco_instr.py b/src/build/android/gyp/jacoco_instr.py
new file mode 100755
index 0000000..8e5f29c
--- /dev/null
+++ b/src/build/android/gyp/jacoco_instr.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'jacoco_instr' action in the Java build process.
+Depending on whether jacoco_instrument is set, the 'jacoco_instr' action will
+call the instrument command, which accepts a jar and instruments it using
+jacococli.jar.
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+
+def _AddArguments(parser):
+  """Adds arguments related to instrumentation to parser.
+
+  Args:
+    parser: ArgumentParser object.
+  """
+  parser.add_argument(
+      '--input-path',
+      required=True,
+      help='Path to input file(s). Either the classes '
+      'directory, or the path to a jar.')
+  parser.add_argument(
+      '--output-path',
+      required=True,
+      help='Path to output final file(s) to. Either the '
+      'final classes directory, or the directory in '
+      'which to place the instrumented/copied jar.')
+  parser.add_argument(
+      '--sources-json-file',
+      required=True,
+      help='File to create with the list of source directories '
+      'and input path.')
+  parser.add_argument(
+      '--java-sources-file',
+      required=True,
+      help='File containing newline-separated .java paths')
+  parser.add_argument(
+      '--jacococli-jar', required=True, help='Path to jacococli.jar.')
+  parser.add_argument(
+      '--files-to-instrument',
+      help='Path to a file containing which source files are affected.')
+
+
+def _GetSourceDirsFromSourceFiles(source_files):
+  """Returns list of directories for the files in |source_files|.
+
+  Args:
+    source_files: List of source files.
+
+  Returns:
+    List of source directories.
+  """
+  return list(set(os.path.dirname(source_file) for source_file in source_files))
+
+
+def _CreateSourcesJsonFile(source_dirs, input_path, sources_json_file,
+                           src_root):
+  """Adds all normalized source directories and input path to
+  |sources_json_file|.
+
+  Args:
+    source_dirs: List of source directories.
+    input_path: The input path to non-instrumented class files.
+    sources_json_file: File into which to write the list of source directories
+      and input path.
+    src_root: Root which sources added to the file should be relative to.
+
+  Returns:
+    An exit code.
+  """
+  src_root = os.path.abspath(src_root)
+  relative_sources = []
+  for s in source_dirs:
+    abs_source = os.path.abspath(s)
+    if abs_source[:len(src_root)] != src_root:
+      print('Error: found source directory not under repository root: %s %s' %
+            (abs_source, src_root))
+      return 1
+    rel_source = os.path.relpath(abs_source, src_root)
+
+    relative_sources.append(rel_source)
+
+  data = {}
+  data['source_dirs'] = relative_sources
+  data['input_path'] = []
+  if input_path:
+    data['input_path'].append(os.path.abspath(input_path))
+  with open(sources_json_file, 'w') as f:
+    json.dump(data, f)
+
+
+def _GetAffectedClasses(jar_file, source_files):
+  """Gets affected classes by affected source files to a jar.
+
+  Args:
+    jar_file: The jar file whose members are examined.
+    source_files: The list of affected source files.
+
+  Returns:
+    A tuple of affected classes and unaffected members.
+  """
+  with zipfile.ZipFile(jar_file) as f:
+    members = f.namelist()
+
+  affected_classes = []
+  unaffected_members = []
+
+  for member in members:
+    if not member.endswith('.class'):
+      unaffected_members.append(member)
+      continue
+
+    is_affected = False
+    index = member.find('$')
+    if index == -1:
+      index = member.find('.class')
+    for source_file in source_files:
+      if source_file.endswith(member[:index] + '.java'):
+        affected_classes.append(member)
+        is_affected = True
+        break
+    if not is_affected:
+      unaffected_members.append(member)
+
+  return affected_classes, unaffected_members
+
+
+def _InstrumentClassFiles(instrument_cmd,
+                          input_path,
+                          output_path,
+                          temp_dir,
+                          affected_source_files=None):
+  """Instruments class files from input jar.
+
+  Args:
+    instrument_cmd: JaCoCo instrument command.
+    input_path: The input path to non-instrumented jar.
+    output_path: The output path to instrumented jar.
+    temp_dir: The temporary directory.
+    affected_source_files: The affected source file paths for the input jar.
+      Defaults to None, which means all class files in the jar are
+      instrumented.
+  """
+  affected_classes = None
+  unaffected_members = None
+  if affected_source_files:
+    affected_classes, unaffected_members = _GetAffectedClasses(
+        input_path, affected_source_files)
+
+  # Extract affected class files.
+  with zipfile.ZipFile(input_path) as f:
+    f.extractall(temp_dir, affected_classes)
+
+  instrumented_dir = os.path.join(temp_dir, 'instrumented')
+
+  # Instrument extracted class files.
+  instrument_cmd.extend([temp_dir, '--dest', instrumented_dir])
+  build_utils.CheckOutput(instrument_cmd)
+
+  if affected_source_files and unaffected_members:
+    # Extract unaffected members to instrumented_dir.
+    with zipfile.ZipFile(input_path) as f:
+      f.extractall(instrumented_dir, unaffected_members)
+
+  # Zip all files to output_path
+  build_utils.ZipDir(output_path, instrumented_dir)
+
+
+def _RunInstrumentCommand(parser):
+  """Instruments class or Jar files using JaCoCo.
+
+  Args:
+    parser: ArgumentParser object.
+
+  Returns:
+    An exit code.
+  """
+  args = parser.parse_args()
+
+  source_files = []
+  if args.java_sources_file:
+    source_files.extend(build_utils.ReadSourcesList(args.java_sources_file))
+
+  with build_utils.TempDir() as temp_dir:
+    instrument_cmd = build_utils.JavaCmd() + [
+        '-jar', args.jacococli_jar, 'instrument'
+    ]
+
+    if not args.files_to_instrument:
+      _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
+                            temp_dir)
+    else:
+      affected_files = build_utils.ReadSourcesList(args.files_to_instrument)
+      source_set = set(source_files)
+      affected_source_files = [f for f in affected_files if f in source_set]
+
+      # Copy input_path to output_path and return if no source files are
+      # affected.
+      if not affected_source_files:
+        shutil.copyfile(args.input_path, args.output_path)
+        # Create a dummy sources_json_file.
+        _CreateSourcesJsonFile([], None, args.sources_json_file,
+                               build_utils.DIR_SOURCE_ROOT)
+        return 0
+      else:
+        _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
+                              temp_dir, affected_source_files)
+
+  source_dirs = _GetSourceDirsFromSourceFiles(source_files)
+  # TODO(GYP): In GN, we are passed the list of sources, detecting source
+  # directories, then walking them to re-establish the list of sources.
+  # This can obviously be simplified!
+  _CreateSourcesJsonFile(source_dirs, args.input_path, args.sources_json_file,
+                         build_utils.DIR_SOURCE_ROOT)
+
+  return 0
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  _AddArguments(parser)
+  _RunInstrumentCommand(parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
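The class-matching rule in _GetAffectedClasses can be illustrated with a standalone sketch (the paths are hypothetical):

    def is_affected(member, source_files):
        # Mirrors _GetAffectedClasses: trim at '$' (inner class) or '.class',
        # then look for a source file ending with the matching '.java' path.
        index = member.find('$')
        if index == -1:
            index = member.find('.class')
        return any(s.endswith(member[:index] + '.java') for s in source_files)

    print(is_affected('org/foo/Bar$1.class', ['src/org/foo/Bar.java']))  # True
    print(is_affected('org/foo/Baz.class', ['src/org/foo/Bar.java']))    # False
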
diff --git a/src/build/android/gyp/jacoco_instr.pydeps b/src/build/android/gyp/jacoco_instr.pydeps
new file mode 100644
index 0000000..d7fec19
--- /dev/null
+++ b/src/build/android/gyp/jacoco_instr.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jacoco_instr.pydeps build/android/gyp/jacoco_instr.py
+../../gn_helpers.py
+jacoco_instr.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/java_cpp_enum.py b/src/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000..08a381a
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,437 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+from datetime import date
+import re
+import optparse
+import os
+from string import Template
+import sys
+import textwrap
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files; however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+ENUM_FIXED_TYPE_ALLOWLIST = [
+    'char', 'unsigned char', 'short', 'unsigned short', 'int', 'int8_t',
+    'int16_t', 'int32_t', 'uint8_t', 'uint16_t'
+]
+
+
+class EnumDefinition(object):
+  def __init__(self, original_enum_name=None, class_name_override=None,
+               enum_package=None, entries=None, comments=None, fixed_type=None):
+    self.original_enum_name = original_enum_name
+    self.class_name_override = class_name_override
+    self.enum_package = enum_package
+    self.entries = collections.OrderedDict(entries or [])
+    self.comments = collections.OrderedDict(comments or [])
+    self.prefix_to_strip = None
+    self.fixed_type = fixed_type
+
+  def AppendEntry(self, key, value):
+    if key in self.entries:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.entries[key] = value
+
+  def AppendEntryComment(self, key, value):
+    if key in self.comments:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.comments[key] = value
+
+  @property
+  def class_name(self):
+    return self.class_name_override or self.original_enum_name
+
+  def Finalize(self):
+    self._Validate()
+    self._AssignEntryIndices()
+    self._StripPrefix()
+    self._NormalizeNames()
+
+  def _Validate(self):
+    assert self.class_name
+    assert self.enum_package
+    assert self.entries
+    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_ALLOWLIST:
+      raise Exception('Fixed type %s for enum %s not in allowlist.' %
+                      (self.fixed_type, self.class_name))
+
+  def _AssignEntryIndices(self):
+    # Enums, if given no value, are given the value of the previous enum + 1.
+    if not all(self.entries.values()):
+      prev_enum_value = -1
+      for key, value in self.entries.items():
+        if not value:
+          self.entries[key] = prev_enum_value + 1
+        elif value in self.entries:
+          self.entries[key] = self.entries[value]
+        else:
+          try:
+            self.entries[key] = int(value)
+          except ValueError:
+            raise Exception('Could not interpret integer from enum value "%s" '
+                            'for key %s.' % (value, key))
+        prev_enum_value = self.entries[key]
+
+
+  def _StripPrefix(self):
+    prefix_to_strip = self.prefix_to_strip
+    if not prefix_to_strip:
+      shout_case = self.original_enum_name
+      shout_case = re.sub('(?!^)([A-Z]+)', r'_\1', shout_case).upper()
+      shout_case += '_'
+
+      prefixes = [shout_case, self.original_enum_name,
+                  'k' + self.original_enum_name]
+
+      for prefix in prefixes:
+        if all([w.startswith(prefix) for w in self.entries.keys()]):
+          prefix_to_strip = prefix
+          break
+      else:
+        prefix_to_strip = ''
+
+    def StripEntries(entries):
+      ret = collections.OrderedDict()
+      for k, v in entries.items():
+        stripped_key = k.replace(prefix_to_strip, '', 1)
+        if isinstance(v, str):
+          stripped_value = v.replace(prefix_to_strip, '')
+        else:
+          stripped_value = v
+        ret[stripped_key] = stripped_value
+
+      return ret
+
+    self.entries = StripEntries(self.entries)
+    self.comments = StripEntries(self.comments)
+
+  def _NormalizeNames(self):
+    self.entries = _TransformKeys(self.entries, java_cpp_utils.KCamelToShouty)
+    self.comments = _TransformKeys(self.comments, java_cpp_utils.KCamelToShouty)
+
+
+def _TransformKeys(d, func):
+  """Normalize keys in |d| and update references to old keys in |d| values."""
+  keys_map = {k: func(k) for k in d}
+  ret = collections.OrderedDict()
+  for k, v in d.items():
+    # Need to transform values as well when the entry value was explicitly set
+    # (since it could contain references to other enum entry values).
+    if isinstance(v, str):
+      # First check if a full replacement is available. This avoids issues when
+      # one key is a substring of another.
+      if v in d:
+        v = keys_map[v]
+      else:
+        for old_key, new_key in keys_map.items():
+          v = v.replace(old_key, new_key)
+    ret[keys_map[k]] = v
+  return ret
+
+
+class DirectiveSet(object):
+  class_name_override_key = 'CLASS_NAME_OVERRIDE'
+  enum_package_key = 'ENUM_PACKAGE'
+  prefix_to_strip_key = 'PREFIX_TO_STRIP'
+
+  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]
+
+  def __init__(self):
+    self._directives = {}
+
+  def Update(self, key, value):
+    if key not in DirectiveSet.known_keys:
+      raise Exception("Unknown directive: " + key)
+    self._directives[key] = value
+
+  @property
+  def empty(self):
+    return len(self._directives) == 0
+
+  def UpdateDefinition(self, definition):
+    definition.class_name_override = self._directives.get(
+        DirectiveSet.class_name_override_key, '')
+    definition.enum_package = self._directives.get(
+        DirectiveSet.enum_package_key)
+    definition.prefix_to_strip = self._directives.get(
+        DirectiveSet.prefix_to_strip_key)
+
+
+class HeaderParser(object):
+  single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)')
+  multi_line_comment_start_re = re.compile(r'\s*/\*')
+  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
+  enum_end_re = re.compile(r'^\s*}\s*;\.*$')
+  generator_error_re = re.compile(r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*$')
+  generator_directive_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
+  multi_line_generator_directive_start_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
+  multi_line_directive_continuation_re = re.compile(r'^\s*//\s+([\.\w]+)$')
+  multi_line_directive_end_re = re.compile(r'^\s*//\s+([\.\w]*)\)$')
+
+  optional_class_or_struct_re = r'(class|struct)?'
+  enum_name_re = r'(\w+)'
+  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
+  enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
+      optional_class_or_struct_re + r'\s*' + enum_name_re + r'\s*' +
+      optional_fixed_type_re + r'\s*{\s*')
+  enum_single_line_re = re.compile(
+      r'^\s*(?:\[cpp.*\])?\s*enum.*{(?P<enum_entries>.*)}.*$')
+
+  def __init__(self, lines, path=''):
+    self._lines = lines
+    self._path = path
+    self._enum_definitions = []
+    self._in_enum = False
+    self._current_definition = None
+    self._current_comments = []
+    self._generator_directives = DirectiveSet()
+    self._multi_line_generator_directive = None
+    self._current_enum_entry = ''
+
+  def _ApplyGeneratorDirectives(self):
+    self._generator_directives.UpdateDefinition(self._current_definition)
+    self._generator_directives = DirectiveSet()
+
+  def ParseDefinitions(self):
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._enum_definitions
+
+  def _ParseLine(self, line):
+    if self._multi_line_generator_directive:
+      self._ParseMultiLineDirectiveLine(line)
+    elif not self._in_enum:
+      self._ParseRegularLine(line)
+    else:
+      self._ParseEnumLine(line)
+
+  def _ParseEnumLine(self, line):
+    if HeaderParser.multi_line_comment_start_re.match(line):
+      raise Exception('Multi-line comments in enums are not supported in ' +
+                      self._path)
+
+    enum_comment = HeaderParser.single_line_comment_re.match(line)
+    if enum_comment:
+      comment = enum_comment.groups()[0]
+      if comment:
+        self._current_comments.append(comment)
+    elif HeaderParser.enum_end_re.match(line):
+      self._FinalizeCurrentEnumDefinition()
+    else:
+      self._AddToCurrentEnumEntry(line)
+      if ',' in line:
+        self._ParseCurrentEnumEntry()
+
+  def _ParseSingleLineEnum(self, line):
+    for entry in line.split(','):
+      self._AddToCurrentEnumEntry(entry)
+      self._ParseCurrentEnumEntry()
+
+    self._FinalizeCurrentEnumDefinition()
+
+  def _ParseCurrentEnumEntry(self):
+    if not self._current_enum_entry:
+      return
+
+    enum_entry = HeaderParser.enum_line_re.match(self._current_enum_entry)
+    if not enum_entry:
+      raise Exception('Unexpected error while attempting to parse %s as enum '
+                      'entry.' % self._current_enum_entry)
+
+    enum_key = enum_entry.groups()[0]
+    enum_value = enum_entry.groups()[2]
+    self._current_definition.AppendEntry(enum_key, enum_value)
+    if self._current_comments:
+      self._current_definition.AppendEntryComment(
+          enum_key, ' '.join(self._current_comments))
+      self._current_comments = []
+    self._current_enum_entry = ''
+
+  def _AddToCurrentEnumEntry(self, line):
+    self._current_enum_entry += ' ' + line.strip()
+
+  def _FinalizeCurrentEnumDefinition(self):
+    if self._current_enum_entry:
+      self._ParseCurrentEnumEntry()
+    self._ApplyGeneratorDirectives()
+    self._current_definition.Finalize()
+    self._enum_definitions.append(self._current_definition)
+    self._current_definition = None
+    self._in_enum = False
+
+  def _ParseMultiLineDirectiveLine(self, line):
+    multi_line_directive_continuation = (
+        HeaderParser.multi_line_directive_continuation_re.match(line))
+    multi_line_directive_end = (
+        HeaderParser.multi_line_directive_end_re.match(line))
+
+    if multi_line_directive_continuation:
+      value_cont = multi_line_directive_continuation.groups()[0]
+      self._multi_line_generator_directive[1].append(value_cont)
+    elif multi_line_directive_end:
+      directive_name = self._multi_line_generator_directive[0]
+      directive_value = "".join(self._multi_line_generator_directive[1])
+      directive_value += multi_line_directive_end.groups()[0]
+      self._multi_line_generator_directive = None
+      self._generator_directives.Update(directive_name, directive_value)
+    else:
+      raise Exception('Malformed multi-line directive declaration in ' +
+                      self._path)
+
+  def _ParseRegularLine(self, line):
+    enum_start = HeaderParser.enum_start_re.match(line)
+    generator_directive_error = HeaderParser.generator_error_re.match(line)
+    generator_directive = HeaderParser.generator_directive_re.match(line)
+    multi_line_generator_directive_start = (
+        HeaderParser.multi_line_generator_directive_start_re.match(line))
+    single_line_enum = HeaderParser.enum_single_line_re.match(line)
+
+    if generator_directive_error:
+      raise Exception('Malformed directive declaration in ' + self._path +
+                      '. Use () for multi-line directives. E.g.\n' +
+                      '// GENERATED_JAVA_ENUM_PACKAGE: (\n' +
+                      '//   foo.package)')
+    elif generator_directive:
+      directive_name = generator_directive.groups()[0]
+      directive_value = generator_directive.groups()[1]
+      self._generator_directives.Update(directive_name, directive_value)
+    elif multi_line_generator_directive_start:
+      directive_name = multi_line_generator_directive_start.groups()[0]
+      directive_value = multi_line_generator_directive_start.groups()[1]
+      self._multi_line_generator_directive = (directive_name, [directive_value])
+    elif enum_start or single_line_enum:
+      if self._generator_directives.empty:
+        return
+      self._current_definition = EnumDefinition(
+          original_enum_name=enum_start.groups()[1],
+          fixed_type=enum_start.groups()[3])
+      self._in_enum = True
+      if single_line_enum:
+        self._ParseSingleLineEnum(single_line_enum.group('enum_entries'))
+
+
+def DoGenerate(source_paths):
+  for source_path in source_paths:
+    enum_definitions = DoParseHeaderFile(source_path)
+    if not enum_definitions:
+      raise Exception('No enums found in %s\n'
+                      'Did you forget to prefix enums with '
+                      '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
+                      source_path)
+    for enum_definition in enum_definitions:
+      output_path = java_cpp_utils.GetJavaFilePath(enum_definition.enum_package,
+                                                   enum_definition.class_name)
+      output = GenerateOutput(source_path, enum_definition)
+      yield output_path, output
+
+
+def DoParseHeaderFile(path):
+  with open(path) as f:
+    return HeaderParser(f.readlines(), path).ParseDefinitions()
+
+
+def GenerateOutput(source_path, enum_definition):
+  template = Template("""
+// Copyright ${YEAR} The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+import androidx.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+${INT_DEF}
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ${CLASS_NAME} {
+${ENUM_ENTRIES}
+}
+""")
+
+  enum_template = Template('  int ${NAME} = ${VALUE};')
+  enum_entries_string = []
+  enum_names = []
+  for enum_name, enum_value in enum_definition.entries.items():
+    values = {
+        'NAME': enum_name,
+        'VALUE': enum_value,
+    }
+    enum_comments = enum_definition.comments.get(enum_name)
+    if enum_comments:
+      enum_comments_indent = '   * '
+      comments_line_wrapper = textwrap.TextWrapper(
+          initial_indent=enum_comments_indent,
+          subsequent_indent=enum_comments_indent,
+          width=100)
+      enum_entries_string.append('  /**')
+      enum_entries_string.append('\n'.join(
+          comments_line_wrapper.wrap(enum_comments)))
+      enum_entries_string.append('   */')
+    enum_entries_string.append(enum_template.substitute(values))
+    if enum_name != "NUM_ENTRIES":
+      enum_names.append(enum_definition.class_name + '.' + enum_name)
+  enum_entries_string = '\n'.join(enum_entries_string)
+
+  enum_names_indent = ' ' * 4
+  wrapper = textwrap.TextWrapper(initial_indent=enum_names_indent,
+                                 subsequent_indent=enum_names_indent,
+                                 width=100)
+  enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names)))
+
+  values = {
+      'CLASS_NAME': enum_definition.class_name,
+      'ENUM_ENTRIES': enum_entries_string,
+      'PACKAGE': enum_definition.enum_package,
+      'INT_DEF': enum_names_string,
+      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+      'SOURCE_PATH': source_path,
+      'YEAR': str(date.today().year)
+  }
+  return template.substitute(values)
+
+
+def DoMain(argv):
+  usage = 'usage: %prog --srcjar SRCJAR input_file(s)...'
+  parser = optparse.OptionParser(usage=usage)
+
+  parser.add_option('--srcjar',
+                    help='Path of the .srcjar to create from the generated '
+                    '.java files.')
+
+  options, args = parser.parse_args(argv)
+
+  if not args:
+    parser.error('Need to specify at least one input file')
+  input_paths = args
+
+  with build_utils.AtomicOutput(options.srcjar) as f:
+    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+      for output_path, data in DoGenerate(input_paths):
+        build_utils.AddToZipHermetic(srcjar, output_path, data=data)
+
+
+if __name__ == '__main__':
+  DoMain(sys.argv[1:])
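A minimal sketch of driving the parser and generator above directly, mirroring
what DoGenerate does but printing the generated Java instead of packaging a
srcjar; the header snippet and the 'color.h' path are illustrative only:

    import java_cpp_enum

    header_lines = """
      // GENERATED_JAVA_ENUM_PACKAGE: org.example
      enum Color {
        COLOR_RED,
        COLOR_GREEN,
      };
    """.split('\n')

    # One EnumDefinition per annotated enum; the COLOR_ prefix is stripped,
    # so this prints an @IntDef with RED and GREEN entries.
    for definition in java_cpp_enum.HeaderParser(header_lines).ParseDefinitions():
        print(java_cpp_enum.GenerateOutput('color.h', definition))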
diff --git a/src/build/android/gyp/java_cpp_enum.pydeps b/src/build/android/gyp/java_cpp_enum.pydeps
new file mode 100644
index 0000000..e6aaeb7
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_enum.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py
+../../gn_helpers.py
+java_cpp_enum.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/src/build/android/gyp/java_cpp_enum_tests.py b/src/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000..6d5f150
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,783 @@
+#!/usr/bin/env python3
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for enum_preprocess.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+from datetime import date
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput
+from java_cpp_enum import HeaderParser
+from util import java_cpp_utils
+
+
+class TestPreprocess(unittest.TestCase):
+  def testOutput(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='some.package',
+                                entries=[('E1', 1), ('E2', '2 << 2')],
+                                comments=[('E2', 'This is a comment.'),
+                                          ('E1', 'This is a multiple line '
+                                                 'comment that is really long. '
+                                                 'This is a multiple line '
+                                                 'comment that is really '
+                                                 'really long.')])
+    output = GenerateOutput('path/to/file', definition)
+    expected = """
+// Copyright %d The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     %s
+// From
+//     path/to/file
+
+package some.package;
+
+import androidx.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+    ClassName.E1, ClassName.E2
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ClassName {
+  /**
+   * %s
+   * really really long.
+   */
+  int E1 = 1;
+  /**
+   * This is a comment.
+   */
+  int E2 = 2 << 2;
+}
+"""
+    long_comment = ('This is a multiple line comment that is really long. '
+                    'This is a multiple line comment that is')
+    self.assertEqual(
+        expected % (date.today().year, java_cpp_utils.GetScriptName(),
+                    long_comment), output)
+
+  def testParseSimpleEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO,
+        VALUE_ONE,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+                                              ('VALUE_ONE', 1)]),
+                     definition.entries)
+
+  def testParseBitShifts(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO = 1 << 0,
+        VALUE_ONE = 1 << 1,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        ENUM_NAME_ZERO = 1 << 0,
+        ENUM_NAME_ONE = 1 << 1,
+        ENUM_NAME_TWO = ENUM_NAME_ZERO | ENUM_NAME_ONE,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
+                                              ('VALUE_ONE', '1 << 1')]),
+                     definition.entries)
+
+    definition = definitions[1]
+    expected_entries = collections.OrderedDict([
+        ('ZERO', '1 << 0'),
+        ('ONE', '1 << 1'),
+        ('TWO', 'ZERO | ONE')])
+    self.assertEqual(expected_entries, definition.entries)
+
+  def testParseMultilineEnumEntry(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+      enum Foo {
+        VALUE_ZERO = 1 << 0,
+        VALUE_ONE =
+            SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ControlKey,
+        VALUE_TWO = 1 << 18,
+      };
+    """.split('\n')
+    expected_entries = collections.OrderedDict([
+        ('VALUE_ZERO', '1 << 0'),
+        ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+         'ControlKey'),
+        ('VALUE_TWO', '1 << 18')])
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('bar.namespace', definition.enum_package)
+    self.assertEqual(expected_entries, definition.entries)
+
+  def testParseEnumEntryWithTrailingMultilineEntry(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+      enum Foo {
+        VALUE_ZERO = 1,
+        VALUE_ONE =
+            SymbolKey | FnKey | AltGrKey | MetaKey |
+            AltKey | ControlKey | ShiftKey,
+      };
+    """.split('\n')
+    expected_entries = collections.OrderedDict([
+        ('VALUE_ZERO', '1'),
+        ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+         'ControlKey | ShiftKey')])
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('bar.namespace', definition.enum_package)
+    self.assertEqual(expected_entries, definition.entries)
+
+  def testParseNoCommaAfterLastEntry(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+      enum Foo {
+        VALUE_ZERO = 1,
+
+        // This is a multiline
+        //
+        // comment with an empty line.
+        VALUE_ONE = 2
+      };
+    """.split('\n')
+    expected_entries = collections.OrderedDict([
+        ('VALUE_ZERO', '1'),
+        ('VALUE_ONE', '2')])
+    expected_comments = collections.OrderedDict([
+        ('VALUE_ONE', 'This is a multiline comment with an empty line.')])
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('bar.namespace', definition.enum_package)
+    self.assertEqual(expected_entries, definition.entries)
+    self.assertEqual(expected_comments, definition.comments)
+
+  def testParseClassNameOverride(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        FOO
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
+      enum PrefixTest {
+        PREFIX_TEST_A,
+        PREFIX_TEST_B,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('OverrideName', definition.class_name)
+
+    definition = definitions[1]
+    self.assertEqual('OtherOverride', definition.class_name)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParsePreservesCommentsWhenPrefixStripping(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        ENUM_ONE_A = 1,
+        // Comment there
+        ENUM_ONE_B = A,
+      };
+
+      enum EnumIgnore {
+        C, D, E
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo {
+        P_A,
+        // This comment spans
+        // two lines.
+        P_B
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumOne', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', 'A')]),
+                     definition.entries)
+    self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+                     definition.comments)
+    definition = definitions[1]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict(
+        [('B', 'This comment spans two lines.')]), definition.comments)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseTwoEnums(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum AnEnum {
+        ENUM_ONE_A = 1,
+        ENUM_ONE_B = A,
+      };
+
+      enum EnumIgnore {
+        C, D, E
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      enum EnumTwo {
+        P_A,
+        P_B
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('AnEnum', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('ENUM_ONE_A', '1'),
+                                              ('ENUM_ONE_B', 'A')]),
+                     definition.entries)
+    definition = definitions[1]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('P_A', 0),
+                                              ('P_B', 1)]),
+                     definition.entries)
+
+  def testParseSingleLineEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo { P_A, P_B };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseWithStrippingAndRelativeReferences(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo {
+        P_A = 1,
+        // P_A is old-don't use P_A.
+        P_B = P_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', 'A')]),
+                     definition.entries)
+    self.assertEqual(collections.OrderedDict([('B', 'A is old-don\'t use A.')]),
+                     definition.comments)
+
+  def testParseSingleLineAndRegularEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        ENUM_ONE_A = 1,
+        // Comment there
+        ENUM_ONE_B = A,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      enum EnumTwo { P_A, P_B };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        ENUM_NAME_FOO
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual(
+        collections.OrderedDict([('A', '1'), ('B', 'A')]), definition.entries)
+    self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+                     definition.comments)
+
+    self.assertEqual(3, len(definitions))
+    definition = definitions[1]
+    self.assertEqual(
+        collections.OrderedDict([('P_A', 0), ('P_B', 1)]), definition.entries)
+
+    definition = definitions[2]
+    self.assertEqual(collections.OrderedDict([('FOO', 0)]), definition.entries)
+
+  def testParseWithCamelCaseNames(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumTest {
+        EnumTestA = 1,
+        // comment for EnumTestB.
+        EnumTestB = 2,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_PREFIX_TO_STRIP: Test
+      enum AnEnum {
+        TestHTTPOption,
+        TestHTTPSOption,
+      };
+
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual(
+        collections.OrderedDict([('A', '1'), ('B', '2')]),
+        definition.entries)
+    self.assertEqual(
+        collections.OrderedDict([('B', 'comment for B.')]),
+        definition.comments)
+
+    definition = definitions[1]
+    self.assertEqual(
+        collections.OrderedDict([('HTTP_OPTION', 0), ('HTTPS_OPTION', 1)]),
+        definition.entries)
+
+  def testParseWithKCamelCaseNames(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        kEnumOne = 1,
+        // comment for kEnumTwo.
+        kEnumTwo = 2,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        kEnumNameFoo,
+        kEnumNameBar
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        kEnumNameFoo,
+        kEnumBar,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Keys {
+        kSymbolKey = 1 << 0,
+        kAltKey = 1 << 1,
+        kUpKey = 1 << 2,
+        kKeyModifiers = kSymbolKey | kAltKey | kUpKey | kKeyModifiers,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Mixed {
+        kTestVal,
+        kCodecMPEG2
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual(
+        collections.OrderedDict([('ENUM_ONE', '1'), ('ENUM_TWO', '2')]),
+        definition.entries)
+    self.assertEqual(
+        collections.OrderedDict([('ENUM_TWO', 'comment for ENUM_TWO.')]),
+        definition.comments)
+
+    definition = definitions[1]
+    self.assertEqual(
+        collections.OrderedDict([('FOO', 0), ('BAR', 1)]),
+        definition.entries)
+
+    definition = definitions[2]
+    self.assertEqual(
+        collections.OrderedDict([('ENUM_NAME_FOO', 0), ('ENUM_BAR', 1)]),
+        definition.entries)
+
+    definition = definitions[3]
+    expected_entries = collections.OrderedDict([
+        ('SYMBOL_KEY', '1 << 0'),
+        ('ALT_KEY', '1 << 1'),
+        ('UP_KEY', '1 << 2'),
+        ('KEY_MODIFIERS', 'SYMBOL_KEY | ALT_KEY | UP_KEY | KEY_MODIFIERS')])
+    self.assertEqual(expected_entries, definition.entries)
+
+    definition = definitions[4]
+    self.assertEqual(
+        collections.OrderedDict([('TEST_VAL', 0), ('CODEC_MPEG2', 1)]),
+        definition.entries)
+
+  def testParseThrowsOnUnknownDirective(self):
+    test_data = """
+      // GENERATED_JAVA_UNKNOWN: Value
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseReturnsEmptyListWithoutDirectives(self):
+    test_data = """
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
+
+  def testParseEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseEnumClassOneValueSubstringOfAnother(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class SafeBrowsingStatus {
+        kChecking = 0,
+        kEnabled = 1,
+        kDisabled = 2,
+        kDisabledByAdmin = 3,
+        kDisabledByExtension = 4,
+        kEnabledStandard = 5,
+        kEnabledEnhanced = 6,
+        // New enum values must go above here.
+        kMaxValue = kEnabledEnhanced,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('SafeBrowsingStatus', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(
+        collections.OrderedDict([
+            ('CHECKING', '0'),
+            ('ENABLED', '1'),
+            ('DISABLED', '2'),
+            ('DISABLED_BY_ADMIN', '3'),
+            ('DISABLED_BY_EXTENSION', '4'),
+            ('ENABLED_STANDARD', '5'),
+            ('ENABLED_ENHANCED', '6'),
+            ('MAX_VALUE', 'ENABLED_ENHANCED'),
+        ]), definition.entries)
+    self.assertEqual(
+        collections.OrderedDict([
+            ('MAX_VALUE', 'New enum values must go above here.')
+        ]), definition.comments)
+
+  def testParseEnumStruct(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum struct Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Foo : int {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('int', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: unsigned short {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('unsigned short', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseUnknownFixedTypeRaises(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: foo_type {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseSimpleMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (te
+      //   st.name
+      //   space)
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+
+  def testParseMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
+      //   Ba
+      //   r
+      //   )
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirectiveShort(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirectiveMissingBrackets(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE:
+      // test.namespace
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testEnumValueAssignmentNoneDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentAllDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', '1')
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', '3')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', '2'),
+                                              ('C', '3')]),
+                     definition.entries)
+
+  def testEnumValueAssignmentReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', None)
+    definition.AppendEntry('D', 'C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 1),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSet(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 2),
+                                              ('C', 3)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSetReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', 'B')
+    definition.AppendEntry('D', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 0),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentRaises(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'foo')
+    definition.AppendEntry('C', None)
+    with self.assertRaises(Exception):
+      definition.Finalize()
+
+  def testExplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('P_A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('P_C', None)
+    definition.AppendEntry('P_LAST', 'P_C')
+    definition.prefix_to_strip = 'P_'
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='p')
+    definition.AppendEntry('CLASS_NAME_A', None)
+    definition.AppendEntry('CLASS_NAME_B', None)
+    definition.AppendEntry('CLASS_NAME_C', None)
+    definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+    definition = EnumDefinition(original_enum_name='Name',
+                                enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('NAME_LAST', None)
+    definition.Finalize()
+    self.assertEqual(['A', 'B', 'NAME_LAST'], list(definition.entries.keys()))
+
+  def testGenerateThrowsOnEmptyInput(self):
+    with self.assertRaises(Exception):
+      original_do_parse = java_cpp_enum.DoParseHeaderFile
+      try:
+        java_cpp_enum.DoParseHeaderFile = lambda _: []
+        for _ in java_cpp_enum.DoGenerate(['file']):
+          pass
+      finally:
+        java_cpp_enum.DoParseHeaderFile = original_do_parse
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/java_cpp_features.py b/src/build/android/gyp/java_cpp_features.py
new file mode 100755
index 0000000..8e7c244
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_features.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
+class FeatureParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
+  # Ex. 'const base::Feature kConstantName{"StringNameOfTheFeature", ...};'
+  # would parse as:
+  #   ExtractConstantName() -> 'ConstantName'
+  #   ExtractValue() -> '"StringNameOfTheFeature"'
+  FEATURE_RE = re.compile(r'\s*const (?:base::)?Feature\s+k(\w+)\s*(?:=\s*)?{')
+  VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*,')
+
+  def ExtractConstantName(self, line):
+    match = FeatureParserDelegate.FEATURE_RE.match(line)
+    return match.group(1) if match else None
+
+  def ExtractValue(self, line):
+    match = FeatureParserDelegate.VALUE_RE.search(line)
+    return match.group(1) if match else None
+
+  def CreateJavaConstant(self, name, value, comments):
+    return java_cpp_utils.JavaString(name, value, comments)
+
+
+def _GenerateOutput(template, source_paths, template_path, features):
+  description_template = """
+    // This following string constants were inserted by
+    //     {SCRIPT_NAME}
+    // From
+    //     {SOURCE_PATHS}
+    // Into
+    //     {TEMPLATE_PATH}
+
+"""
+  values = {
+      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+      'SOURCE_PATHS': ',\n    //     '.join(source_paths),
+      'TEMPLATE_PATH': template_path,
+  }
+  description = description_template.format(**values)
+  native_features = '\n\n'.join(x.Format() for x in features)
+
+  values = {
+      'NATIVE_FEATURES': description + native_features,
+  }
+  return template.format(**values)
+
+
+def _ParseFeatureFile(path):
+  with open(path) as f:
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        FeatureParserDelegate(), f.readlines())
+  return feature_file_parser.Parse()
+
+
+def _Generate(source_paths, template_path):
+  with open(template_path) as f:
+    lines = f.readlines()
+
+  template = ''.join(lines)
+  package, class_name = java_cpp_utils.ParseTemplateFile(lines)
+  output_path = java_cpp_utils.GetJavaFilePath(package, class_name)
+
+  features = []
+  for source_path in source_paths:
+    features.extend(_ParseFeatureFile(source_path))
+
+  output = _GenerateOutput(template, source_paths, template_path, features)
+  return output, output_path
+
+
+def _Main(argv):
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument('--srcjar',
+                      required=True,
+                      help='The path at which to generate the .srcjar file')
+
+  parser.add_argument('--template',
+                      required=True,
+                      help='The template file with which to generate the Java '
+                      'class. Must have "{NATIVE_FEATURES}" somewhere in '
+                      'the template.')
+
+  parser.add_argument('inputs',
+                      nargs='+',
+                      help='Input file(s)',
+                      metavar='INPUTFILE')
+  args = parser.parse_args(argv)
+
+  with build_utils.AtomicOutput(args.srcjar) as f:
+    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+      data, path = _Generate(args.inputs, args.template)
+      build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+  _Main(sys.argv[1:])
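A minimal sketch of how FeatureParserDelegate plugs into the shared
CppConstantParser; the feature declaration below is illustrative:

    import java_cpp_features
    from util import java_cpp_utils

    lines = """
    // Controls the hypothetical frobnication pipeline.
    const base::Feature kFrobnication{"Frobnication",
                                      base::FEATURE_DISABLED_BY_DEFAULT};
    """.split('\n')

    parser = java_cpp_utils.CppConstantParser(
        java_cpp_features.FeatureParserDelegate(), lines)
    for feature in parser.Parse():
        print(feature.name, feature.value)  # FROBNICATION "Frobnication"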
diff --git a/src/build/android/gyp/java_cpp_features.pydeps b/src/build/android/gyp/java_cpp_features.pydeps
new file mode 100644
index 0000000..acffae2
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_features.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_features.pydeps build/android/gyp/java_cpp_features.py
+../../gn_helpers.py
+java_cpp_features.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/src/build/android/gyp/java_cpp_features_tests.py b/src/build/android/gyp/java_cpp_features_tests.py
new file mode 100755
index 0000000..5dcdcd8
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_features_tests.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for java_cpp_features.py.
+
+This test suite contains various tests for the C++ -> Java base::Feature
+generator.
+"""
+
+import unittest
+
+import java_cpp_features
+from util import java_cpp_utils
+
+
+class _TestFeaturesParser(unittest.TestCase):
+  def testParseComments(self):
+    test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const base::Feature kSomeFeature{"SomeFeature",
+                                 base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Real comment that spans
+// multiple lines.
+const base::Feature kSomeOtherFeature{"SomeOtherFeature",
+                                      base::FEATURE_ENABLED_BY_DEFAULT};
+
+// Comment followed by nothing.
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(2, len(features))
+    self.assertEqual('SOME_FEATURE', features[0].name)
+    self.assertEqual('"SomeFeature"', features[0].value)
+    self.assertEqual(1, len(features[0].comments.split('\n')))
+    self.assertEqual('SOME_OTHER_FEATURE', features[1].name)
+    self.assertEqual('"SomeOtherFeature"', features[1].value)
+    self.assertEqual(2, len(features[1].comments.split('\n')))
+
+  def testWhitespace(self):
+    test_data = """
+// 1 line
+const base::Feature kShort{"Short", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// 2 lines
+const base::Feature kTwoLineFeatureA{"TwoLineFeatureA",
+                                     base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kTwoLineFeatureB{
+    "TwoLineFeatureB", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// 3 lines
+const base::Feature kFeatureWithAVeryLongNameThatWillHaveToWrap{
+    "FeatureWithAVeryLongNameThatWillHaveToWrap",
+    base::FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(4, len(features))
+    self.assertEqual('SHORT', features[0].name)
+    self.assertEqual('"Short"', features[0].value)
+    self.assertEqual('TWO_LINE_FEATURE_A', features[1].name)
+    self.assertEqual('"TwoLineFeatureA"', features[1].value)
+    self.assertEqual('TWO_LINE_FEATURE_B', features[2].name)
+    self.assertEqual('"TwoLineFeatureB"', features[2].value)
+    self.assertEqual('FEATURE_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+                     features[3].name)
+    self.assertEqual('"FeatureWithAVeryLongNameThatWillHaveToWrap"',
+                     features[3].value)
+
+  def testCppSyntax(self):
+    test_data = """
+// Mismatched name
+const base::Feature kMismatchedFeature{"MismatchedName",
+    base::FEATURE_DISABLED_BY_DEFAULT};
+
+namespace myfeature {
+// In a namespace
+const base::Feature kSomeFeature{"SomeFeature",
+                                 base::FEATURE_DISABLED_BY_DEFAULT};
+}
+
+// Defined with equals sign
+const base::Feature kFoo = {"Foo", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Build config-specific base::Feature
+#if defined(OS_ANDROID)
+const base::Feature kAndroidOnlyFeature{"AndroidOnlyFeature",
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+#endif
+
+// Value depends on build config
+const base::Feature kMaybeEnabled{"MaybeEnabled",
+#if defined(OS_ANDROID)
+    base::FEATURE_DISABLED_BY_DEFAULT
+#else
+    base::FEATURE_ENABLED_BY_DEFAULT
+#endif
+};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(5, len(features))
+    self.assertEqual('MISMATCHED_FEATURE', features[0].name)
+    self.assertEqual('"MismatchedName"', features[0].value)
+    self.assertEqual('SOME_FEATURE', features[1].name)
+    self.assertEqual('"SomeFeature"', features[1].value)
+    self.assertEqual('FOO', features[2].name)
+    self.assertEqual('"Foo"', features[2].value)
+    self.assertEqual('ANDROID_ONLY_FEATURE', features[3].name)
+    self.assertEqual('"AndroidOnlyFeature"', features[3].value)
+    self.assertEqual('MAYBE_ENABLED', features[4].name)
+    self.assertEqual('"MaybeEnabled"', features[4].value)
+
+  def testNotYetSupported(self):
+    # Negative test for cases we don't yet support, to ensure we don't misparse
+    # these until we intentionally add proper support.
+    test_data = """
+// Not currently supported: name depends on C++ directive
+const base::Feature kNameDependsOnOs{
+#if defined(OS_ANDROID)
+    "MaybeName1",
+#else
+    "MaybeName2",
+#endif
+    base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Not currently supported: feature named with a constant instead of literal
+const base::Feature kNamedAfterConstant{kNamedStringConstant,
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(0, len(features))
+
+  def testTreatWebViewLikeOneWord(self):
+    test_data = """
+const base::Feature kSomeWebViewFeature{"SomeWebViewFeature",
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kWebViewOtherFeature{"WebViewOtherFeature",
+                                         base::FEATURE_ENABLED_BY_DEFAULT};
+const base::Feature kFeatureWithPluralWebViews{
+    "FeatureWithPluralWebViews",
+    base::FEATURE_ENABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual('SOME_WEBVIEW_FEATURE', features[0].name)
+    self.assertEqual('"SomeWebViewFeature"', features[0].value)
+    self.assertEqual('WEBVIEW_OTHER_FEATURE', features[1].name)
+    self.assertEqual('"WebViewOtherFeature"', features[1].value)
+    self.assertEqual('FEATURE_WITH_PLURAL_WEBVIEWS', features[2].name)
+    self.assertEqual('"FeatureWithPluralWebViews"', features[2].value)
+
+  def testSpecialCharacters(self):
+    test_data = r"""
+const base::Feature kFeatureWithEscapes{"Weird\tfeature\"name\n",
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kFeatureWithEscapes2{
+    "Weird\tfeature\"name\n",
+    base::FEATURE_ENABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual('FEATURE_WITH_ESCAPES', features[0].name)
+    self.assertEqual(r'"Weird\tfeature\"name\n"', features[0].value)
+    self.assertEqual('FEATURE_WITH_ESCAPES2', features[1].name)
+    self.assertEqual(r'"Weird\tfeature\"name\n"', features[1].value)
+
+  def testNoBaseNamespacePrefix(self):
+    test_data = """
+const Feature kSomeFeature{"SomeFeature", FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual('SOME_FEATURE', features[0].name)
+    self.assertEqual('"SomeFeature"', features[0].value)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/java_cpp_strings.py b/src/build/android/gyp/java_cpp_strings.py
new file mode 100755
index 0000000..d713599
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_strings.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
+class StringParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
+  STRING_RE = re.compile(r'\s*const char k(.*)\[\]\s*=')
+  VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*;')
+
+  def ExtractConstantName(self, line):
+    match = StringParserDelegate.STRING_RE.match(line)
+    return match.group(1) if match else None
+
+  def ExtractValue(self, line):
+    match = StringParserDelegate.VALUE_RE.search(line)
+    return match.group(1) if match else None
+
+  def CreateJavaConstant(self, name, value, comments):
+    return java_cpp_utils.JavaString(name, value, comments)
+
+
+def _GenerateOutput(template, source_paths, template_path, strings):
+  description_template = """
+    // The following string constants were inserted by
+    //     {SCRIPT_NAME}
+    // From
+    //     {SOURCE_PATHS}
+    // Into
+    //     {TEMPLATE_PATH}
+
+"""
+  values = {
+      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+      'SOURCE_PATHS': ',\n    //     '.join(source_paths),
+      'TEMPLATE_PATH': template_path,
+  }
+  description = description_template.format(**values)
+  native_strings = '\n\n'.join(x.Format() for x in strings)
+
+  values = {
+      'NATIVE_STRINGS': description + native_strings,
+  }
+  return template.format(**values)
+
+
+def _ParseStringFile(path):
+  with open(path) as f:
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        StringParserDelegate(), f.readlines())
+  return string_file_parser.Parse()
+
+
+def _Generate(source_paths, template_path):
+  with open(template_path) as f:
+    lines = f.readlines()
+
+  template = ''.join(lines)
+  package, class_name = java_cpp_utils.ParseTemplateFile(lines)
+  output_path = java_cpp_utils.GetJavaFilePath(package, class_name)
+  strings = []
+  for source_path in source_paths:
+    strings.extend(_ParseStringFile(source_path))
+
+  output = _GenerateOutput(template, source_paths, template_path, strings)
+  return output, output_path
+
+
+def _Main(argv):
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument('--srcjar',
+                      required=True,
+                      help='The path at which to generate the .srcjar file')
+
+  parser.add_argument('--template',
+                      required=True,
+                      help='The template file with which to generate the Java '
+                      'class. Must have "{NATIVE_STRINGS}" somewhere in '
+                      'the template.')
+
+  parser.add_argument(
+      'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE')
+  args = parser.parse_args(argv)
+
+  with build_utils.AtomicOutput(args.srcjar) as f:
+    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+      data, path = _Generate(args.inputs, args.template)
+      build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+  _Main(sys.argv[1:])
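The --template file is plain Java run through str.format(): literal braces are
doubled, and the {NATIVE_STRINGS} placeholder marks where the generated
constants land. A hypothetical template (package and class names are
illustrative):

    package org.example.flags;

    public final class ExampleSwitches {{
    {NATIVE_STRINGS}
    }}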
diff --git a/src/build/android/gyp/java_cpp_strings.pydeps b/src/build/android/gyp/java_cpp_strings.pydeps
new file mode 100644
index 0000000..0a821f4
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_strings.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py
+../../gn_helpers.py
+java_cpp_strings.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/src/build/android/gyp/java_cpp_strings_tests.py b/src/build/android/gyp/java_cpp_strings_tests.py
new file mode 100755
index 0000000..4cb1eee
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_strings_tests.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_strings.py.
+
+This test suite contains various tests for the C++ -> Java string generator.
+"""
+
+import unittest
+
+import java_cpp_strings
+from util import java_cpp_utils
+
+
+class _TestStringsParser(unittest.TestCase):
+
+  def testParseComments(self):
+    test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const char kASwitch[] = "a-value";
+
+// Real comment that spans
+// multiple lines.
+const char kAnotherSwitch[] = "another-value";
+
+// Comment followed by nothing.
+""".split('\n')
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_strings.StringParserDelegate(), test_data)
+    strings = string_file_parser.Parse()
+    self.assertEqual(2, len(strings))
+    self.assertEqual('A_SWITCH', strings[0].name)
+    self.assertEqual('"a-value"', strings[0].value)
+    self.assertEqual(1, len(strings[0].comments.split('\n')))
+    self.assertEqual('ANOTHER_SWITCH', strings[1].name)
+    self.assertEqual('"another-value"', strings[1].value)
+    self.assertEqual(2, len(strings[1].comments.split('\n')))
+
+  def testStringValues(self):
+    test_data = r"""
+// Single line string constants.
+const char kAString[] = "a-value";
+const char kNoComment[] = "no-comment";
+
+namespace myfeature {
+const char kMyFeatureNoComment[] = "myfeature.no-comment";
+}
+
+// Single line switch with a big space.
+const char kAStringWithSpace[]                      = "a-value";
+
+// Wrapped constant definition.
+const char kAStringWithAVeryLongNameThatWillHaveToWrap[] =
+    "a-string-with-a-very-long-name-that-will-have-to-wrap";
+
+// This one has no comment before it.
+
+const char kAStringWithAVeryLongNameThatWillHaveToWrap2[] =
+    "a-string-with-a-very-long-name-that-will-have-to-wrap2";
+
+const char kStringWithEscapes[] = "tab\tquote\"newline\n";
+const char kStringWithEscapes2[] =
+    "tab\tquote\"newline\n";
+
+const char kEmptyString[] = "";
+
+// These are valid C++ but not currently supported by the script.
+const char kInvalidLineBreak[] =
+
+    "invalid-line-break";
+
+const char kConcatenateMultipleStringLiterals[] =
+    "first line"
+    "second line";
+""".split('\n')
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_strings.StringParserDelegate(), test_data)
+    strings = string_file_parser.Parse()
+    self.assertEqual(9, len(strings))
+    self.assertEqual('A_STRING', strings[0].name)
+    self.assertEqual('"a-value"', strings[0].value)
+    self.assertEqual('NO_COMMENT', strings[1].name)
+    self.assertEqual('"no-comment"', strings[1].value)
+    self.assertEqual('MY_FEATURE_NO_COMMENT', strings[2].name)
+    self.assertEqual('"myfeature.no-comment"', strings[2].value)
+    self.assertEqual('A_STRING_WITH_SPACE', strings[3].name)
+    self.assertEqual('"a-value"', strings[3].value)
+    self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+                     strings[4].name)
+    self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"',
+                     strings[4].value)
+    self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP2',
+                     strings[5].name)
+    self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap2"',
+                     strings[5].value)
+    self.assertEqual('STRING_WITH_ESCAPES', strings[6].name)
+    self.assertEqual(r'"tab\tquote\"newline\n"', strings[6].value)
+    self.assertEqual('STRING_WITH_ESCAPES2', strings[7].name)
+    self.assertEqual(r'"tab\tquote\"newline\n"', strings[7].value)
+    self.assertEqual('EMPTY_STRING', strings[8].name)
+    self.assertEqual('""', strings[8].value)
+
+  def testTreatWebViewLikeOneWord(self):
+    test_data = """
+const char kSomeWebViewSwitch[] = "some-webview-switch";
+const char kWebViewOtherSwitch[] = "webview-other-switch";
+const char kSwitchWithPluralWebViews[] = "switch-with-plural-webviews";
+""".split('\n')
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_strings.StringParserDelegate(), test_data)
+    strings = string_file_parser.Parse()
+    self.assertEqual('SOME_WEBVIEW_SWITCH', strings[0].name)
+    self.assertEqual('"some-webview-switch"', strings[0].value)
+    self.assertEqual('WEBVIEW_OTHER_SWITCH', strings[1].name)
+    self.assertEqual('"webview-other-switch"', strings[1].value)
+    self.assertEqual('SWITCH_WITH_PLURAL_WEBVIEWS', strings[2].name)
+    self.assertEqual('"switch-with-plural-webviews"', strings[2].value)
+
+  def testTemplateParsing(self):
+    test_data = """
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package my.java.package;
+
+public any sort of class MyClass {{
+
+{NATIVE_STRINGS}
+
+}}
+""".split('\n')
+    package, class_name = java_cpp_utils.ParseTemplateFile(test_data)
+    self.assertEqual('my.java.package', package)
+    self.assertEqual('MyClass', class_name)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/java_google_api_keys.py b/src/build/android/gyp/java_google_api_keys.py
new file mode 100755
index 0000000..a58628a
--- /dev/null
+++ b/src/build/android/gyp/java_google_api_keys.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a Java file with API keys.
+
+import argparse
+import os
+import string
+import sys
+import zipfile
+
+from util import build_utils
+
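+# google_api_keys.py lives in //google_apis, which is not on sys.path here.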
+sys.path.append(
+    os.path.abspath(os.path.join(sys.path[0], '../../../google_apis')))
+import google_api_keys
+
+
+PACKAGE = 'org.chromium.chrome'
+CLASSNAME = 'GoogleAPIKeys'
+
+
+def GetScriptName():
+  return os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+
+def GenerateOutput(constant_definitions):
+  template = string.Template("""
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${CONSTANT_ENTRIES}
+}
+""")
+
+  constant_template = string.Template(
+      '  public static final String ${NAME} = "${VALUE}";')
+  constant_entries_list = []
+  for constant_name, constant_value in constant_definitions.items():
+    values = {
+        'NAME': constant_name,
+        'VALUE': constant_value,
+    }
+    constant_entries_list.append(constant_template.substitute(values))
+  constant_entries_string = '\n'.join(constant_entries_list)
+
+  values = {
+      'CLASS_NAME': CLASSNAME,
+      'CONSTANT_ENTRIES': constant_entries_string,
+      'PACKAGE': PACKAGE,
+      'SCRIPT_NAME': GetScriptName(),
+      'SOURCE_PATH': 'google_api_keys/google_api_keys.h',
+  }
+  return template.substitute(values)
+
+
+def _DoWriteJavaOutput(output_path, constant_definition):
+  folder = os.path.dirname(output_path)
+  if folder and not os.path.exists(folder):
+    os.makedirs(folder)
+  with open(output_path, 'w') as out_file:
+    out_file.write(GenerateOutput(constant_definition))
+
+
+def _DoWriteJarOutput(output_path, constant_definition):
+  folder = os.path.dirname(output_path)
+  if folder and not os.path.exists(folder):
+    os.makedirs(folder)
+  with zipfile.ZipFile(output_path, 'w') as srcjar:
+    path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java')
+    data = GenerateOutput(constant_definition)
+    build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+def _DoMain(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument("--out", help="Path for java output.")
+  parser.add_argument("--srcjar", help="Path for srcjar output.")
+  options = parser.parse_args(argv)
+  if not options.out and not options.srcjar:
+    parser.print_help()
+    sys.exit(-1)
+
+  values = {}
+  values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey()
+  values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (
+      google_api_keys.GetAPIKeyPhysicalWebTest())
+  values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN')
+  values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN')
+  values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID(
+      'CLOUD_PRINT')
+  values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret(
+      'CLOUD_PRINT')
+  values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING')
+  values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret(
+      'REMOTING')
+  values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID(
+      'REMOTING_HOST')
+  values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (
+      google_api_keys.GetClientSecret('REMOTING_HOST'))
+  values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (
+      google_api_keys.GetClientID('REMOTING_IDENTITY_API'))
+
+  if options.out:
+    _DoWriteJavaOutput(options.out, values)
+  if options.srcjar:
+    _DoWriteJarOutput(options.srcjar, values)
+
+
+if __name__ == '__main__':
+  _DoMain(sys.argv[1:])
diff --git a/src/build/android/gyp/java_google_api_keys.pydeps b/src/build/android/gyp/java_google_api_keys.pydeps
new file mode 100644
index 0000000..ebb7172
--- /dev/null
+++ b/src/build/android/gyp/java_google_api_keys.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_google_api_keys.pydeps build/android/gyp/java_google_api_keys.py
+../../../google_apis/google_api_keys.py
+../../gn_helpers.py
+java_google_api_keys.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/java_google_api_keys_tests.py b/src/build/android/gyp/java_google_api_keys_tests.py
new file mode 100755
index 0000000..e00e86c
--- /dev/null
+++ b/src/build/android/gyp/java_google_api_keys_tests.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_google_api_keys.py.
+
+This test suite contains various tests for the C++ -> Java Google API Keys
+generator.
+"""
+
+import unittest
+
+import java_google_api_keys
+
+
+class TestJavaGoogleAPIKeys(unittest.TestCase):
+  def testOutput(self):
+    definition = {'E1': 'abc', 'E2': 'defgh'}
+    output = java_google_api_keys.GenerateOutput(definition)
+    expected = """
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     %s
+// From
+//     google_api_keys/google_api_keys.h
+
+package org.chromium.chrome;
+
+public class GoogleAPIKeys {
+  public static final String E1 = "abc";
+  public static final String E2 = "defgh";
+}
+"""
+    self.assertEqual(expected % java_google_api_keys.GetScriptName(), output)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/jetify_jar.py b/src/build/android/gyp/jetify_jar.py
new file mode 100755
index 0000000..e97ad97
--- /dev/null
+++ b/src/build/android/gyp/jetify_jar.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+def _AddArguments(parser):
+  """Adds arguments related to jetifying to parser.
+
+  Args:
+    parser: ArgumentParser object.
+  """
+  parser.add_argument(
+      '--input-path',
+      required=True,
+      help='Path to input file(s). Either the classes '
+      'directory, or the path to a jar.')
+  parser.add_argument(
+      '--output-path',
+      required=True,
+      help='Path to output final file(s) to. Either the '
+      'final classes directory, or the directory in '
+      'which to place the instrumented/copied jar.')
+  parser.add_argument(
+      '--jetify-path', required=True, help='Path to jetify bin.')
+  parser.add_argument(
+      '--jetify-config-path', required=True, help='Path to jetify config file.')
+
+
+def _RunJetifyCommand(parser):
+  args = parser.parse_args()
+  cmd = [
+      args.jetify_path,
+      '-i',
+      args.input_path,
+      '-o',
+      args.output_path,
+      # Need to suppress a lot of warning output when jar doesn't have
+      # any references rewritten.
+      '-l',
+      'error'
+  ]
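+  # Illustrative resulting command (paths hypothetical):
+  #   <jetify-path> -i libs/foo.jar -o out/foo.jar -l error [-c config.json]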
+  if args.jetify_config_path:
+    cmd.extend(['-c', args.jetify_config_path])
+  # Must wait for jetify command to complete to prevent race condition.
+  env = os.environ.copy()
+  env['JAVA_HOME'] = build_utils.JAVA_HOME
+  subprocess.check_call(cmd, env=env)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  _AddArguments(parser)
+  _RunJetifyCommand(parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/jetify_jar.pydeps b/src/build/android/gyp/jetify_jar.pydeps
new file mode 100644
index 0000000..6a1a589
--- /dev/null
+++ b/src/build/android/gyp/jetify_jar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jetify_jar.pydeps build/android/gyp/jetify_jar.py
+../../gn_helpers.py
+jetify_jar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/jinja_template.py b/src/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000..d42189b
--- /dev/null
+++ b/src/build/android/gyp/jinja_template.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import argparse
+import codecs
+import os
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.constants import host_paths
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2  # pylint: disable=F0401
+
+
+class _RecordingFileSystemLoader(jinja2.FileSystemLoader):
+  def __init__(self, searchpath):
+    jinja2.FileSystemLoader.__init__(self, searchpath)
+    self.loaded_templates = set()
+
+  def get_source(self, environment, template):
+    contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+        self, environment, template)
+    self.loaded_templates.add(os.path.relpath(filename))
+    return contents, filename, uptodate
+
+
+class JinjaProcessor(object):
+  """Allows easy rendering of jinja templates with input file tracking."""
+  def __init__(self, loader_base_dir, variables=None):
+    self.loader_base_dir = loader_base_dir
+    self.variables = variables or {}
+    self.loader = _RecordingFileSystemLoader(loader_base_dir)
+    self.env = jinja2.Environment(loader=self.loader)
+    self.env.undefined = jinja2.StrictUndefined
+    self.env.line_comment_prefix = '##'
+    self.env.trim_blocks = True
+    self.env.lstrip_blocks = True
+    self._template_cache = {}  # Map of path -> Template
+
+  def Render(self, input_filename, variables=None):
+    input_rel_path = os.path.relpath(input_filename, self.loader_base_dir)
+    template = self._template_cache.get(input_rel_path)
+    if not template:
+      template = self.env.get_template(input_rel_path)
+      self._template_cache[input_rel_path] = template
+    return template.render(variables or self.variables)
+
+  def GetLoadedTemplates(self):
+    return list(self.loader.loaded_templates)
+
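+# Illustrative usage (paths hypothetical):
+#   processor = JinjaProcessor('/src')
+#   text = processor.Render('/src/templates/foo.xml', {'channel': 'beta'})
+#   input_deps = processor.GetLoadedTemplates()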
+
+def _ProcessFile(processor, input_filename, output_filename):
+  output = processor.Render(input_filename)
+
+  # If |output| is the same as the existing file content, skip the update
+  # so that ninja's restat can avoid rebuilding things that depend on it.
+  if os.path.isfile(output_filename):
+    with codecs.open(output_filename, 'r', 'utf-8') as f:
+      if f.read() == output:
+        return
+
+  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+    output_file.write(output)
+
+
+def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
+  with build_utils.TempDir() as temp_dir:
+    path_info = resource_utils.ResourceInfoFile()
+    for input_filename in input_filenames:
+      relpath = os.path.relpath(os.path.abspath(input_filename),
+                                os.path.abspath(inputs_base_dir))
+      if relpath.startswith(os.pardir):
+        raise Exception('input file %s is not contained in inputs base dir %s'
+                        % (input_filename, inputs_base_dir))
+
+      output_filename = os.path.join(temp_dir, relpath)
+      parent_dir = os.path.dirname(output_filename)
+      build_utils.MakeDirectory(parent_dir)
+      _ProcessFile(processor, input_filename, output_filename)
+      path_info.AddMapping(relpath, input_filename)
+
+    path_info.Write(outputs_zip + '.info')
+    build_utils.ZipDir(outputs_zip, temp_dir)
+
+
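+# For example (illustrative): a --variables value of
+# '["channel=beta", "mstone=39"]' parses to {'channel': 'beta', 'mstone': '39'}.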
+def _ParseVariables(variables_arg, error_func):
+  variables = {}
+  for v in build_utils.ParseGnList(variables_arg):
+    if '=' not in v:
+      error_func('--variables argument must contain "=": ' + v)
+    name, _, value = v.partition('=')
+    variables[name] = value
+  return variables
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--inputs', required=True,
+                      help='GN-list of template files to process.')
+  parser.add_argument('--includes', default='',
+                      help="GN-list of files that get {% include %}'ed.")
+  parser.add_argument('--output', help='The output file to generate. Valid '
+                      'only if there is a single input.')
+  parser.add_argument('--outputs-zip', help='A zip file for the processed '
+                      'templates. Required if there are multiple inputs.')
+  parser.add_argument('--inputs-base-dir', help='A common ancestor directory '
+                      'of the inputs. Each output\'s path in the output zip '
+                      'will match the relative path from INPUTS_BASE_DIR to '
+                      'the input. Required if --outputs-zip is given.')
+  parser.add_argument('--loader-base-dir', help='Base path used by the '
+                      'template loader. Must be a common ancestor directory of '
+                      'the inputs. Defaults to DIR_SOURCE_ROOT.',
+                      default=host_paths.DIR_SOURCE_ROOT)
+  parser.add_argument('--variables', help='Variables to be made available in '
+                      'the template processing environment, as a GN list '
+                      '(e.g. --variables "channel=beta mstone=39")', default='')
+  parser.add_argument('--check-includes', action='store_true',
+                      help='Enable inputs and includes checks.')
+  options = parser.parse_args()
+
+  inputs = build_utils.ParseGnList(options.inputs)
+  includes = build_utils.ParseGnList(options.includes)
+
+  if (options.output is None) == (options.outputs_zip is None):
+    parser.error('Exactly one of --output and --outputs-zip must be given')
+  if options.output and len(inputs) != 1:
+    parser.error('--output cannot be used with multiple inputs')
+  if options.outputs_zip and not options.inputs_base_dir:
+    parser.error('--inputs-base-dir must be given when --outputs-zip is used')
+
+  variables = _ParseVariables(options.variables, parser.error)
+  processor = JinjaProcessor(options.loader_base_dir, variables=variables)
+
+  if options.output:
+    _ProcessFile(processor, inputs[0], options.output)
+  else:
+    _ProcessFiles(processor, inputs, options.inputs_base_dir,
+                  options.outputs_zip)
+
+  if options.check_includes:
+    all_inputs = set(processor.GetLoadedTemplates())
+    all_inputs.difference_update(inputs)
+    all_inputs.difference_update(includes)
+    if all_inputs:
+      raise Exception('Found files not listed via --includes:\n' +
+                      '\n'.join(sorted(all_inputs)))
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/jinja_template.pydeps b/src/build/android/gyp/jinja_template.pydeps
new file mode 100644
index 0000000..af22c40
--- /dev/null
+++ b/src/build/android/gyp/jinja_template.pydeps
@@ -0,0 +1,42 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jinja_template.pydeps build/android/gyp/jinja_template.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+jinja_template.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/lint.py b/src/build/android/gyp/lint.py
new file mode 100755
index 0000000..faad21c
--- /dev/null
+++ b/src/build/android/gyp/lint.py
@@ -0,0 +1,489 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Runs Android's lint tool."""
+
+from __future__ import print_function
+
+import argparse
+import functools
+import logging
+import os
+import re
+import shutil
+import sys
+import time
+import traceback
+from xml.dom import minidom
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import manifest_utils
+from util import server_utils
+
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md'  # pylint: disable=line-too-long
+
+# These checks are not useful for chromium.
+_DISABLED_ALWAYS = [
+    "AppCompatResource",  # Lint does not correctly detect our appcompat lib.
+    "Assert",  # R8 --force-enable-assertions is used to enable java asserts.
+    "InflateParams",  # Null is ok when inflating views for dialogs.
+    "InlinedApi",  # Constants are copied so they are always available.
+    "LintBaseline",  # Don't warn about using baseline.xml files.
+    "MissingApplicationIcon",  # False positive for non-production targets.
+    "SwitchIntDef",  # Many C++ enums are not used at all in java.
+    "UniqueConstants",  # Chromium enums allow aliases.
+    "UnusedAttribute",  # Chromium apks have various minSdkVersion values.
+    "ObsoleteLintCustomCheck",  # We have no control over custom lint checks.
+]
+
+# These checks are not useful for test targets and add an unnecessary
+# suppression burden.
+_DISABLED_FOR_TESTS = [
+    # We should not require test strings.xml files to explicitly add
+    # translatable=false since they are not translated and not used in
+    # production.
+    "MissingTranslation",
+    # Test strings.xml files often have simple names and are not translatable,
+    # so it may conflict with a production string and cause this error.
+    "Untranslatable",
+    # Test targets often use the same strings target and resources target as the
+    # production targets but may not use all of them.
+    "UnusedResources",
+    # TODO(wnwen): Turn this back on, since triggering a real crash would
+    #     require running on devices with all the various minSdkVersions.
+    # Real NewApi violations crash the app, so the only ones that lint catches
+    # but tests still succeed are false positives.
+    "NewApi",
+    # Tests should be allowed to access these methods/classes.
+    "VisibleForTests",
+]
+
+_RES_ZIP_DIR = 'RESZIPS'
+_SRCJAR_DIR = 'SRCJARS'
+_AAR_DIR = 'AARS'
+
+
+def _SrcRelative(path):
+  """Returns relative path to top-level src dir."""
+  return os.path.relpath(path, build_utils.DIR_SOURCE_ROOT)
+
+
+def _GenerateProjectFile(android_manifest,
+                         android_sdk_root,
+                         cache_dir,
+                         sources=None,
+                         classpath=None,
+                         srcjar_sources=None,
+                         resource_sources=None,
+                         custom_lint_jars=None,
+                         custom_annotation_zips=None,
+                         android_sdk_version=None):
+  project = ElementTree.Element('project')
+  root = ElementTree.SubElement(project, 'root')
+  # Run lint from output directory: crbug.com/1115594
+  root.set('dir', os.getcwd())
+  sdk = ElementTree.SubElement(project, 'sdk')
+  # Lint requires that the sdk path be an absolute path.
+  sdk.set('dir', os.path.abspath(android_sdk_root))
+  cache = ElementTree.SubElement(project, 'cache')
+  cache.set('dir', cache_dir)
+  main_module = ElementTree.SubElement(project, 'module')
+  main_module.set('name', 'main')
+  main_module.set('android', 'true')
+  main_module.set('library', 'false')
+  if android_sdk_version:
+    main_module.set('compile_sdk_version', android_sdk_version)
+  manifest = ElementTree.SubElement(main_module, 'manifest')
+  manifest.set('file', android_manifest)
+  if srcjar_sources:
+    for srcjar_file in srcjar_sources:
+      src = ElementTree.SubElement(main_module, 'src')
+      src.set('file', srcjar_file)
+  if sources:
+    for source in sources:
+      src = ElementTree.SubElement(main_module, 'src')
+      src.set('file', source)
+  if classpath:
+    for file_path in classpath:
+      classpath_element = ElementTree.SubElement(main_module, 'classpath')
+      classpath_element.set('file', file_path)
+  if resource_sources:
+    for resource_file in resource_sources:
+      resource = ElementTree.SubElement(main_module, 'resource')
+      resource.set('file', resource_file)
+  if custom_lint_jars:
+    for lint_jar in custom_lint_jars:
+      lint = ElementTree.SubElement(main_module, 'lint-checks')
+      lint.set('file', lint_jar)
+  if custom_annotation_zips:
+    for annotation_zip in custom_annotation_zips:
+      annotation = ElementTree.SubElement(main_module, 'annotations')
+      annotation.set('file', annotation_zip)
+  return project
+
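+# Sketch of the project XML that _GenerateProjectFile emits (values
+# illustrative):
+#   <project>
+#     <root dir="/out/Debug"/>
+#     <sdk dir="/abs/path/to/sdk"/>
+#     <cache dir="cache"/>
+#     <module name="main" android="true" library="false">
+#       <manifest file="AndroidManifest.xml"/>
+#       <src file="Foo.java"/>
+#     </module>
+#   </project>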
+
+def _RetrieveBackportedMethods(backported_methods_path):
+  with open(backported_methods_path) as f:
+    methods = f.read().splitlines()
+  # Methods look like:
+  #   java/util/Set#of(Ljava/lang/Object;)Ljava/util/Set;
+  # But error message looks like:
+  #   Call requires API level R (current min is 21): java.util.Set#of [NewApi]
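+  # After the rewrites below, the example above becomes the regex fragment
+  #   java\.util\.Set#of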
+  methods = (m.replace('/', '\\.') for m in methods)
+  methods = (m[:m.index('(')] for m in methods)
+  return sorted(set(methods))
+
+
+def _GenerateConfigXmlTree(orig_config_path, backported_methods):
+  if orig_config_path:
+    root_node = ElementTree.parse(orig_config_path).getroot()
+  else:
+    root_node = ElementTree.fromstring('<lint/>')
+
+  issue_node = ElementTree.SubElement(root_node, 'issue')
+  issue_node.attrib['id'] = 'NewApi'
+  ignore_node = ElementTree.SubElement(issue_node, 'ignore')
+  ignore_node.attrib['regexp'] = '|'.join(backported_methods)
+  return root_node
+
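+# Shape of the generated config (illustrative):
+#   <lint>
+#     <issue id="NewApi">
+#       <ignore regexp="java\.util\.Set#of|java\.util\.List#of"/>
+#     </issue>
+#   </lint>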
+
+def _GenerateAndroidManifest(original_manifest_path, extra_manifest_paths,
+                             min_sdk_version, android_sdk_version):
+  # Set minSdkVersion in the manifest to the correct value.
+  doc, manifest, app_node = manifest_utils.ParseManifest(original_manifest_path)
+
+  # TODO(crbug.com/1126301): Should this be done using manifest merging?
+  # Add anything in the application node of the extra manifests to the main
+  # manifest to prevent unused resource errors.
+  for path in extra_manifest_paths:
+    _, _, extra_app_node = manifest_utils.ParseManifest(path)
+    for node in extra_app_node:
+      app_node.append(node)
+
+  if app_node.find(
+      '{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE) is None:
+    # Assume no backup is intended; this appeases the AllowBackup lint check
+    # while keeping it working for manifests that do define
+    # android:allowBackup.
+    app_node.set('{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE, 'false')
+
+  uses_sdk = manifest.find('./uses-sdk')
+  if uses_sdk is None:
+    uses_sdk = ElementTree.Element('uses-sdk')
+    manifest.insert(0, uses_sdk)
+  uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
+               min_sdk_version)
+  uses_sdk.set('{%s}targetSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
+               android_sdk_version)
+  return doc
+
+
+def _WriteXmlFile(root, path):
+  logging.info('Writing xml file %s', path)
+  build_utils.MakeDirectory(os.path.dirname(path))
+  with build_utils.AtomicOutput(path) as f:
+    # Although we can write it just with ElementTree.tostring, using minidom
+    # makes it a lot easier to read as a human (also on code search).
+    f.write(
+        minidom.parseString(ElementTree.tostring(
+            root, encoding='utf-8')).toprettyxml(indent='  ').encode('utf-8'))
+
+
+def _RunLint(lint_binary_path,
+             backported_methods_path,
+             config_path,
+             manifest_path,
+             extra_manifest_paths,
+             sources,
+             classpath,
+             cache_dir,
+             android_sdk_version,
+             aars,
+             srcjars,
+             min_sdk_version,
+             resource_sources,
+             resource_zips,
+             android_sdk_root,
+             lint_gen_dir,
+             baseline,
+             testonly_target=False,
+             warnings_as_errors=False):
+  logging.info('Lint starting')
+
+  cmd = [
+      lint_binary_path,
+      '--quiet',  # Silences lint's "." progress updates.
+      '--disable',
+      ','.join(_DISABLED_ALWAYS),
+  ]
+  if baseline:
+    cmd.extend(['--baseline', baseline])
+  if testonly_target:
+    cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
+
+  if not manifest_path:
+    manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
+                                 'android', 'AndroidManifest.xml')
+
+  logging.info('Generating config.xml')
+  backported_methods = _RetrieveBackportedMethods(backported_methods_path)
+  config_xml_node = _GenerateConfigXmlTree(config_path, backported_methods)
+  generated_config_path = os.path.join(lint_gen_dir, 'config.xml')
+  _WriteXmlFile(config_xml_node, generated_config_path)
+  cmd.extend(['--config', generated_config_path])
+
+  logging.info('Generating Android manifest file')
+  android_manifest_tree = _GenerateAndroidManifest(manifest_path,
+                                                   extra_manifest_paths,
+                                                   min_sdk_version,
+                                                   android_sdk_version)
+  # Include the rebased manifest_path in the lint generated path so that it is
+  # clear in error messages where the original AndroidManifest.xml came from.
+  lint_android_manifest_path = os.path.join(lint_gen_dir, manifest_path)
+  _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path)
+
+  resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR)
+  # These are zip files with generated resources (e.g. strings from GRD).
+  logging.info('Extracting resource zips')
+  for resource_zip in resource_zips:
+    # Use a consistent root and name rather than a temporary file so that
+    # suppressions can be local to the lint target and the resource target.
+    resource_dir = os.path.join(resource_root_dir, resource_zip)
+    shutil.rmtree(resource_dir, True)
+    os.makedirs(resource_dir)
+    resource_sources.extend(
+        build_utils.ExtractAll(resource_zip, path=resource_dir))
+
+  logging.info('Extracting aars')
+  aar_root_dir = os.path.join(lint_gen_dir, _AAR_DIR)
+  custom_lint_jars = []
+  custom_annotation_zips = []
+  if aars:
+    for aar in aars:
+      # Use relative source for aar files since they are not generated.
+      aar_dir = os.path.join(aar_root_dir,
+                             os.path.splitext(_SrcRelative(aar))[0])
+      shutil.rmtree(aar_dir, True)
+      os.makedirs(aar_dir)
+      aar_files = build_utils.ExtractAll(aar, path=aar_dir)
+      for f in aar_files:
+        if f.endswith('lint.jar'):
+          custom_lint_jars.append(f)
+        elif f.endswith('annotations.zip'):
+          custom_annotation_zips.append(f)
+
+  logging.info('Extracting srcjars')
+  srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR)
+  srcjar_sources = []
+  if srcjars:
+    for srcjar in srcjars:
+      # Use path without extensions since otherwise the file name includes
+      # .srcjar and lint treats it as a srcjar.
+      srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0])
+      shutil.rmtree(srcjar_dir, True)
+      os.makedirs(srcjar_dir)
+      # Sadly lint's srcjar support is broken since it only considers the first
+      # srcjar. Until we roll a lint version with that fixed, we need to extract
+      # it ourselves.
+      srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir))
+
+  logging.info('Generating project file')
+  project_file_root = _GenerateProjectFile(lint_android_manifest_path,
+                                           android_sdk_root, cache_dir, sources,
+                                           classpath, srcjar_sources,
+                                           resource_sources, custom_lint_jars,
+                                           custom_annotation_zips,
+                                           android_sdk_version)
+
+  project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
+  _WriteXmlFile(project_file_root, project_xml_path)
+  cmd += ['--project', project_xml_path]
+
+  logging.info('Preparing environment variables')
+  env = os.environ.copy()
+  # It is important that lint uses the checked-in JDK11 as it is almost 50%
+  # faster than JDK8.
+  env['JAVA_HOME'] = build_utils.JAVA_HOME
+  # This is necessary so that lint errors print stack traces in stdout.
+  env['LINT_PRINT_STACKTRACE'] = 'true'
+  if baseline and not os.path.exists(baseline):
+    # Generating new baselines is only done locally, and requires more memory to
+    # avoid OOMs.
+    env['LINT_OPTS'] = '-Xmx4g'
+  else:
+    # The default set in the wrapper script is 1g, but that does not seem to
+    # be enough :(
+    env['LINT_OPTS'] = '-Xmx2g'
+
+  # This filter is necessary for JDK11.
+  stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
+  stdout_filter = lambda x: build_utils.FilterLines(x, 'No issues found')
+
+  start = time.time()
+  logging.debug('Lint command %s', ' '.join(cmd))
+  failed = True
+  try:
+    failed = bool(
+        build_utils.CheckOutput(cmd,
+                                env=env,
+                                print_stdout=True,
+                                stdout_filter=stdout_filter,
+                                stderr_filter=stderr_filter,
+                                fail_on_output=warnings_as_errors))
+  finally:
+    # When not treating warnings as errors, display the extra footer.
+    is_debug = os.environ.get('LINT_DEBUG', '0') != '0'
+
+    if failed:
+      print('- For more help with lint in Chrome:', _LINT_MD_URL)
+      if is_debug:
+        print('- DEBUG MODE: Here is the project.xml: {}'.format(
+            _SrcRelative(project_xml_path)))
+      else:
+        print('- Run with LINT_DEBUG=1 to enable lint configuration debugging')
+
+    end = time.time() - start
+    logging.info('Lint command took %ss', end)
+    if not is_debug:
+      shutil.rmtree(aar_root_dir, ignore_errors=True)
+      shutil.rmtree(resource_root_dir, ignore_errors=True)
+      shutil.rmtree(srcjar_root_dir, ignore_errors=True)
+      os.unlink(project_xml_path)
+
+  logging.info('Lint completed')
+
+
+def _ParseArgs(argv):
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--target-name', help='Fully qualified GN target name.')
+  parser.add_argument('--skip-build-server',
+                      action='store_true',
+                      help='Avoid using the build server.')
+  parser.add_argument('--lint-binary-path',
+                      required=True,
+                      help='Path to lint executable.')
+  parser.add_argument('--backported-methods',
+                      help='Path to backported methods file created by R8.')
+  parser.add_argument('--cache-dir',
+                      required=True,
+                      help='Path to the directory in which the android cache '
+                      'directory tree should be stored.')
+  parser.add_argument('--config-path', help='Path to lint suppressions file.')
+  parser.add_argument('--lint-gen-dir',
+                      required=True,
+                      help='Path to store generated xml files.')
+  parser.add_argument('--stamp', help='Path to stamp upon success.')
+  parser.add_argument('--android-sdk-version',
+                      help='Version (API level) of the Android SDK used for '
+                      'building.')
+  parser.add_argument('--min-sdk-version',
+                      required=True,
+                      help='Minimal SDK version to lint against.')
+  parser.add_argument('--android-sdk-root',
+                      required=True,
+                      help='Lint needs an explicit path to the android sdk.')
+  parser.add_argument('--testonly',
+                      action='store_true',
+                      help='If set, some checks like UnusedResources will be '
+                      'disabled since they are not helpful for test '
+                      'targets.')
+  parser.add_argument('--create-cache',
+                      action='store_true',
+                      help='Whether this invocation is just warming the cache.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--java-sources',
+                      help='File containing a list of java source files.')
+  parser.add_argument('--aars', help='GN list of included aars.')
+  parser.add_argument('--srcjars', help='GN list of included srcjars.')
+  parser.add_argument('--manifest-path',
+                      help='Path to original AndroidManifest.xml')
+  parser.add_argument('--extra-manifest-paths',
+                      action='append',
+                      help='GN-list of manifest paths to merge into the '
+                      'original AndroidManifest.xml')
+  parser.add_argument('--resource-sources',
+                      default=[],
+                      action='append',
+                      help='GN-list of resource source files, similar to '
+                      'java source files, but for resource files.')
+  parser.add_argument('--resource-zips',
+                      default=[],
+                      action='append',
+                      help='GN-list of resource zips, zip files of generated '
+                      'resource files.')
+  parser.add_argument('--classpath',
+                      help='List of jars to add to the classpath.')
+  parser.add_argument('--baseline',
+                      help='Baseline file to ignore existing errors and fail '
+                      'on new errors.')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(argv))
+  args.java_sources = build_utils.ParseGnList(args.java_sources)
+  args.aars = build_utils.ParseGnList(args.aars)
+  args.srcjars = build_utils.ParseGnList(args.srcjars)
+  args.resource_sources = build_utils.ParseGnList(args.resource_sources)
+  args.extra_manifest_paths = build_utils.ParseGnList(args.extra_manifest_paths)
+  args.resource_zips = build_utils.ParseGnList(args.resource_zips)
+  args.classpath = build_utils.ParseGnList(args.classpath)
+  return args
+
+
+def main():
+  build_utils.InitLogging('LINT_DEBUG')
+  args = _ParseArgs(sys.argv[1:])
+
+  # TODO(wnwen): Consider removing lint cache now that there are only two lint
+  #              invocations.
+  # Avoid parallelizing cache creation, since lint runs that execute without
+  # the cache defeat the purpose of creating the cache in the first place.
+  if (not args.create_cache and not args.skip_build_server
+      and server_utils.MaybeRunCommand(
+          name=args.target_name, argv=sys.argv, stamp_file=args.stamp)):
+    return
+
+  sources = []
+  for java_sources_file in args.java_sources:
+    sources.extend(build_utils.ReadSourcesList(java_sources_file))
+  resource_sources = []
+  for resource_sources_file in args.resource_sources:
+    resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))
+
+  possible_depfile_deps = (args.srcjars + args.resource_zips + sources +
+                           resource_sources + [
+                               args.baseline,
+                               args.manifest_path,
+                           ])
+  depfile_deps = [p for p in possible_depfile_deps if p]
+
+  _RunLint(args.lint_binary_path,
+           args.backported_methods,
+           args.config_path,
+           args.manifest_path,
+           args.extra_manifest_paths,
+           sources,
+           args.classpath,
+           args.cache_dir,
+           args.android_sdk_version,
+           args.aars,
+           args.srcjars,
+           args.min_sdk_version,
+           resource_sources,
+           args.resource_zips,
+           args.android_sdk_root,
+           args.lint_gen_dir,
+           args.baseline,
+           testonly_target=args.testonly,
+           warnings_as_errors=args.warnings_as_errors)
+  logging.info('Creating stamp file')
+  build_utils.Touch(args.stamp)
+
+  if args.depfile:
+    build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/lint.pydeps b/src/build/android/gyp/lint.pydeps
new file mode 100644
index 0000000..0994e19
--- /dev/null
+++ b/src/build/android/gyp/lint.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
+../../gn_helpers.py
+lint.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
+util/server_utils.py
diff --git a/src/build/android/gyp/merge_manifest.py b/src/build/android/gyp/merge_manifest.py
new file mode 100755
index 0000000..53f1c11
--- /dev/null
+++ b/src/build/android/gyp/merge_manifest.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges dependency Android manifests into a root manifest."""
+
+import argparse
+import contextlib
+import os
+import sys
+import tempfile
+import xml.etree.ElementTree as ElementTree
+
+from util import build_utils
+from util import manifest_utils
+
+_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger'
+_MANIFEST_MERGER_JARS = [
+    os.path.join('build-system', 'manifest-merger.jar'),
+    os.path.join('common', 'common.jar'),
+    os.path.join('sdk-common', 'sdk-common.jar'),
+    os.path.join('sdklib', 'sdklib.jar'),
+    os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre',
+                 'guava-28.1-jre.jar'),
+    os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib',
+                 'kotlin-stdlib.jar'),
+    os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.5',
+                 'gson-2.8.5.jar'),
+]
+
+
+@contextlib.contextmanager
+def _ProcessManifest(manifest_path, min_sdk_version, target_sdk_version,
+                     max_sdk_version, manifest_package):
+  """Patches an Android manifest's package and performs assertions to ensure
+  correctness for the manifest.
+  """
+  doc, manifest, _ = manifest_utils.ParseManifest(manifest_path)
+  manifest_utils.AssertUsesSdk(manifest, min_sdk_version, target_sdk_version,
+                               max_sdk_version)
+  assert manifest_utils.GetPackage(manifest) or manifest_package, \
+            'Must set manifest package in GN or in AndroidManifest.xml'
+  manifest_utils.AssertPackage(manifest, manifest_package)
+  if manifest_package:
+    manifest.set('package', manifest_package)
+  tmp_prefix = os.path.basename(manifest_path)
+  with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest:
+    manifest_utils.SaveManifest(doc, patched_manifest.name)
+    yield patched_manifest.name, manifest_utils.GetPackage(manifest)
+
+
+def _BuildManifestMergerClasspath(android_sdk_cmdline_tools):
+  return ':'.join([
+      os.path.join(android_sdk_cmdline_tools, 'lib', jar)
+      for jar in _MANIFEST_MERGER_JARS
+  ])
+
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--android-sdk-cmdline-tools',
+      help='Path to SDK\'s cmdline-tools folder.',
+      required=True)
+  parser.add_argument('--root-manifest',
+                      help='Root manifest to merge the others into',
+                      required=True)
+  parser.add_argument('--output', help='Output manifest path', required=True)
+  parser.add_argument('--extras',
+                      help='GN list of additional manifests to merge')
+  parser.add_argument(
+      '--min-sdk-version',
+      required=True,
+      help='android:minSdkVersion for merging.')
+  parser.add_argument(
+      '--target-sdk-version',
+      required=True,
+      help='android:targetSdkVersion for merging.')
+  parser.add_argument(
+      '--max-sdk-version', help='android:maxSdkVersion for merging.')
+  parser.add_argument(
+      '--manifest-package',
+      help='Package name of the merged AndroidManifest.xml.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  args = parser.parse_args(argv)
+
+  classpath = _BuildManifestMergerClasspath(args.android_sdk_cmdline_tools)
+
+  with build_utils.AtomicOutput(args.output) as output:
+    cmd = build_utils.JavaCmd(args.warnings_as_errors) + [
+        '-cp',
+        classpath,
+        _MANIFEST_MERGER_MAIN_CLASS,
+        '--out',
+        output.name,
+        '--property',
+        'MIN_SDK_VERSION=' + args.min_sdk_version,
+        '--property',
+        'TARGET_SDK_VERSION=' + args.target_sdk_version,
+    ]
+
+    if args.max_sdk_version:
+      cmd += [
+          '--property',
+          'MAX_SDK_VERSION=' + args.max_sdk_version,
+      ]
+
+    extras = build_utils.ParseGnList(args.extras)
+    if extras:
+      cmd += ['--libs', ':'.join(extras)]
+
+    with _ProcessManifest(args.root_manifest, args.min_sdk_version,
+                          args.target_sdk_version, args.max_sdk_version,
+                          args.manifest_package) as tup:
+      root_manifest, package = tup
+      cmd += [
+          '--main',
+          root_manifest,
+          '--property',
+          'PACKAGE=' + package,
+      ]
+      build_utils.CheckOutput(
+          cmd,
+          # https://issuetracker.google.com/issues/63514300:
+          # The merger doesn't set a nonzero exit code for failures.
+          fail_func=lambda returncode, stderr: returncode != 0 or
+          build_utils.IsTimeStale(output.name, [root_manifest] + extras),
+          fail_on_output=args.warnings_as_errors)
+
+    # Check for correct output.
+    _, manifest, _ = manifest_utils.ParseManifest(output.name)
+    manifest_utils.AssertUsesSdk(manifest, args.min_sdk_version,
+                                 args.target_sdk_version)
+    manifest_utils.AssertPackage(manifest, package)
+
+  if args.depfile:
+    inputs = extras + classpath.split(':')
+    build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/merge_manifest.pydeps b/src/build/android/gyp/merge_manifest.pydeps
new file mode 100644
index 0000000..ef9bb34
--- /dev/null
+++ b/src/build/android/gyp/merge_manifest.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py
+../../gn_helpers.py
+merge_manifest.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
diff --git a/src/build/android/gyp/native_libraries_template.py b/src/build/android/gyp/native_libraries_template.py
new file mode 100644
index 0000000..cf336ec
--- /dev/null
+++ b/src/build/android/gyp/native_libraries_template.py
@@ -0,0 +1,39 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+NATIVE_LIBRARIES_TEMPLATE = """\
+// This file is autogenerated by
+//     build/android/gyp/write_native_libraries_java.py
+// Please do not change its content.
+
+package org.chromium.build;
+
+public class NativeLibraries {{
+    public static final int CPU_FAMILY_UNKNOWN = 0;
+    public static final int CPU_FAMILY_ARM = 1;
+    public static final int CPU_FAMILY_MIPS = 2;
+    public static final int CPU_FAMILY_X86 = 3;
+
+    // Set to true to enable the use of the Chromium Linker.
+    public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
+    public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
+    public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};
+
+    // This is the list of native libraries to be loaded (in the correct order)
+    // by LibraryLoader.java.
+    // TODO(cjhopman): This is public since it is referenced by NativeTestActivity.java
+    // directly. The two ways of library loading should be refactored into one.
+    public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};
+
+    // This is the expected version of the 'main' native library, which is the one that
+    // implements the initial set of base JNI functions including
+    // base::android::nativeGetVersionName()
+    // TODO(torne): This is public to work around classloader issues in Trichrome
+    // where NativeLibraries is not in the same dex as LibraryLoader.
+    // We should instead split up Java code along package boundaries.
+    public static {MAYBE_FINAL}String sVersionNumber = {VERSION_NUMBER};
+
+    public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
+}}
+"""
diff --git a/src/build/android/gyp/nocompile_test.py b/src/build/android/gyp/nocompile_test.py
new file mode 100755
index 0000000..a5739f1
--- /dev/null
+++ b/src/build/android/gyp/nocompile_test.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Checks that compiling targets in BUILD.gn file fails."""
+
+import argparse
+import json
+import os
+import subprocess
+import re
+import sys
+from util import build_utils
+
+_CHROMIUM_SRC = os.path.normpath(os.path.join(__file__, '..', '..', '..', '..'))
+_NINJA_PATH = os.path.join(_CHROMIUM_SRC, 'third_party', 'depot_tools', 'ninja')
+
+# Relative to _CHROMIUM_SRC
+_GN_SRC_REL_PATH = os.path.join('third_party', 'depot_tools', 'gn')
+
+
+def _raise_command_exception(args, returncode, output):
+  """Raises an exception whose message describes a command failure.
+
+    Args:
+      args: shell command-line (as passed to subprocess.Popen())
+      returncode: status code.
+      output: command output.
+    Raises:
+      a new Exception.
+    """
+  message = 'Command failed with status {}: {}\n' \
+      'Output:-----------------------------------------\n{}\n' \
+      '------------------------------------------------\n'.format(
+          returncode, args, output)
+  raise Exception(message)
+
+
+def _run_command(args, cwd=None):
+  """Runs shell command. Raises exception if command fails."""
+  p = subprocess.Popen(args,
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.STDOUT,
+                       cwd=cwd)
+  pout, _ = p.communicate()
+  if p.returncode != 0:
+    _raise_command_exception(args, p.returncode, pout)
+
+
+def _run_command_get_output(args, success_output):
+  """Runs shell command and returns command output."""
+  p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  pout, _ = p.communicate()
+  if p.returncode == 0:
+    return success_output
+
+  # For Python3 only:
+  if isinstance(pout, bytes) and sys.version_info >= (3, ):
+    pout = pout.decode('utf-8')
+  return pout
+
+
+def _copy_and_append_gn_args(src_args_path, dest_args_path, extra_args):
+  """Copies args.gn.
+
+    Args:
+      src_args_path: args.gn file to copy.
+      dest_args_path: Copy file destination.
+      extra_args: Text to append to args.gn after copy.
+    """
+  with open(src_args_path) as f_in, open(dest_args_path, 'w') as f_out:
+    f_out.write(f_in.read())
+    f_out.write('\n')
+    f_out.write('\n'.join(extra_args))
+
+
+def _find_lines_after_prefix(text, prefix, num_lines):
+  """Searches |text| for a line which starts with |prefix|.
+
+  Args:
+    text: String to search in.
+    prefix: Prefix to search for.
+    num_lines: Number of lines, starting with line with prefix, to return.
+  Returns:
+    The matched lines, or None if no line starts with |prefix|.
+  """
+  lines = text.split('\n')
+  for i, line in enumerate(lines):
+    if line.startswith(prefix):
+      return lines[i:i + num_lines]
+  return None
+
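+# For example (illustrative):
+#   _find_lines_after_prefix('a\nFAILED: x\nmore\n', 'FAILED:', 2)
+# returns ['FAILED: x', 'more'].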
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--gn-args-path',
+                      required=True,
+                      help='Path to args.gn file.')
+  parser.add_argument('--test-configs-path',
+                      required=True,
+                      help='Path to file with test configurations')
+  parser.add_argument('--out-dir',
+                      required=True,
+                      help='Path to output directory to use for compilation.')
+  parser.add_argument('--stamp', help='Path to touch.')
+  options = parser.parse_args()
+
+  with open(options.test_configs_path) as f:
+    test_configs = json.loads(f.read())
+
+  if not os.path.exists(options.out_dir):
+    os.makedirs(options.out_dir)
+
+  out_gn_args_path = os.path.join(options.out_dir, 'args.gn')
+  extra_gn_args = [
+      'enable_android_nocompile_tests = true',
+      'treat_warnings_as_errors = true',
+      # GOMA does not work with non-standard output directories.
+      'use_goma = false',
+  ]
+  _copy_and_append_gn_args(options.gn_args_path, out_gn_args_path,
+                           extra_gn_args)
+
+  # As all of the test targets are declared in the same BUILD.gn file, it does
+  # not matter which test target is used as the root target.
+  gn_args = [
+      _GN_SRC_REL_PATH, '--root-target=' + test_configs[0]['target'], 'gen',
+      os.path.relpath(options.out_dir, _CHROMIUM_SRC)
+  ]
+  _run_command(gn_args, cwd=_CHROMIUM_SRC)
+
+  error_messages = []
+  for config in test_configs:
+    # Strip leading '//'
+    gn_path = config['target'][2:]
+    expect_regex = config['expect_regex']
+    ninja_args = [_NINJA_PATH, '-C', options.out_dir, gn_path]
+
+    # The quotes at the beginning of the message make it clear that
+    # "Compile successful." is not a compiler log message.
+    test_output = _run_command_get_output(ninja_args, '""\nCompile successful.')
+
+    failure_message_lines = _find_lines_after_prefix(test_output, 'FAILED:', 5)
+
+    found_expect_regex = False
+    if failure_message_lines:
+      for line in failure_message_lines:
+        if re.search(expect_regex, line):
+          found_expect_regex = True
+          break
+    if not found_expect_regex:
+      error_message = '//{} failed.\nExpected compile output pattern:\n'\
+          '{}\nActual compile output:\n{}'.format(
+              gn_path, expect_regex, test_output)
+      error_messages.append(error_message)
+
+  if error_messages:
+    raise Exception('\n'.join(error_messages))
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/prepare_resources.py b/src/build/android/gyp/prepare_resources.py
new file mode 100755
index 0000000..93fe9f9
--- /dev/null
+++ b/src/build/android/gyp/prepare_resources.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resource directories to generate .resources.zip and R.txt
+files."""
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+from util import md5_check
+from util import resources_parser
+from util import resource_utils
+
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from argparse.ArgumentParser.parse_args()
+  """
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_argument('--res-sources-path',
+                      required=True,
+                      help='Path to a list of input resources for this target.')
+
+  parser.add_argument(
+      '--r-text-in',
+      help='Path to pre-existing R.txt. Its resource IDs override those found '
+      'in the generated R.txt when generating R.java.')
+
+  parser.add_argument(
+      '--resource-zip-out',
+      help='Path to a zip archive containing all resources from '
+      '--resource-dirs, merged into a single directory tree.')
+
+  parser.add_argument('--r-text-out',
+                      help='Path to store the generated R.txt file.')
+
+  parser.add_argument('--strip-drawables',
+                      action="store_true",
+                      help='Remove drawables from the resources.')
+
+  options = parser.parse_args(args)
+
+  with open(options.res_sources_path) as f:
+    options.sources = f.read().splitlines()
+  options.resource_dirs = resource_utils.DeduceResourceDirsFromFileList(
+      options.sources)
+
+  return options
+
+
+def _CheckAllFilesListed(resource_files, resource_dirs):
+  resource_files = set(resource_files)
+  missing_files = []
+  for path, _ in resource_utils.IterResourceFilesInDirectories(resource_dirs):
+    if path not in resource_files:
+      missing_files.append(path)
+
+  if missing_files:
+    sys.stderr.write('Error: Found files not listed in the sources list of '
+                     'the BUILD.gn target:\n')
+    for path in missing_files:
+      sys.stderr.write('{}\n'.format(path))
+    sys.exit(1)
+
+
+def _ZipResources(resource_dirs, zip_path, ignore_pattern):
+  # |ignore_pattern| is a ':'-delimited list of globs matching files that
+  # should not be part of the final resource zip.
+  files_to_zip = []
+  path_info = resource_utils.ResourceInfoFile()
+  for index, resource_dir in enumerate(resource_dirs):
+    attributed_aar = None
+    if not resource_dir.startswith('..'):
+      aar_source_info_path = os.path.join(
+          os.path.dirname(resource_dir), 'source.info')
+      if os.path.exists(aar_source_info_path):
+        attributed_aar = jar_info_utils.ReadAarSourceInfo(aar_source_info_path)
+
+    for path, archive_path in resource_utils.IterResourceFilesInDirectories(
+        [resource_dir], ignore_pattern):
+      attributed_path = path
+      if attributed_aar:
+        attributed_path = os.path.join(attributed_aar, 'res',
+                                       path[len(resource_dir) + 1:])
+      # Use the non-prefixed archive_path in the .info file.
+      path_info.AddMapping(archive_path, attributed_path)
+
+      resource_dir_name = os.path.basename(resource_dir)
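+      # e.g. the second dir 'foo/res' with 'values/strings.xml' becomes
+      # '1_res/values/strings.xml' (enumerate index, then dir basename).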
+      archive_path = '{}_{}/{}'.format(index, resource_dir_name, archive_path)
+      files_to_zip.append((archive_path, path))
+
+  path_info.Write(zip_path + '.info')
+
+  with zipfile.ZipFile(zip_path, 'w') as z:
+    # This magic comment signals to resource_utils.ExtractDeps that this zip is
+    # not just the contents of a single res dir, without the encapsulating res/
+    # (like the outputs of android_generated_resources targets), but instead has
+    # the contents of possibly multiple res/ dirs each within an encapsulating
+    # directory within the zip.
+    z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING
+    build_utils.DoZip(files_to_zip, z)
+
+
+def _GenerateRTxt(options, r_txt_path):
+  """Generate R.txt file.
+
+  Args:
+    options: The parsed command-line options.
+    r_txt_path: Path at which to write the R.txt file.
+  """
+  ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN
+  if options.strip_drawables:
+    ignore_pattern += ':*drawable*'
+
+  resources_parser.RTxtGenerator(options.resource_dirs,
+                                 ignore_pattern).WriteRTxtFile(r_txt_path)
+
+
+def _OnStaleMd5(options):
+  with resource_utils.BuildContext() as build:
+    if options.sources:
+      _CheckAllFilesListed(options.sources, options.resource_dirs)
+    if options.r_text_in:
+      r_txt_path = options.r_text_in
+    else:
+      _GenerateRTxt(options, build.r_txt_path)
+      r_txt_path = build.r_txt_path
+
+    if options.r_text_out:
+      shutil.copyfile(r_txt_path, options.r_text_out)
+
+    if options.resource_zip_out:
+      ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN
+      if options.strip_drawables:
+        ignore_pattern += ':*drawable*'
+      _ZipResources(options.resource_dirs, options.resource_zip_out,
+                    ignore_pattern)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  # Order of these must match order specified in GN so that the correct one
+  # appears first in the depfile.
+  output_paths = [
+      options.resource_zip_out,
+      options.resource_zip_out + '.info',
+      options.r_text_out,
+  ]
+
+  input_paths = [options.res_sources_path]
+  if options.r_text_in:
+    input_paths += [options.r_text_in]
+
+  # Resource files aren't explicitly listed in GN. Listing them in the depfile
+  # ensures the target will be marked stale when resource files are removed.
+  depfile_deps = []
+  resource_names = []
+  for resource_dir in options.resource_dirs:
+    for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
+      # Don't list the empty .keep file in depfile. Since it doesn't end up
+      # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
+      # if ever moved.
+      if not resource_file.endswith(os.path.join('empty', '.keep')):
+        input_paths.append(resource_file)
+        depfile_deps.append(resource_file)
+      resource_names.append(os.path.relpath(resource_file, resource_dir))
+
+  # Resource filenames matter to the output, so add them to strings as well.
+  # This matters if a file is renamed but not changed (http://crbug.com/597126).
+  input_strings = sorted(resource_names) + [
+      options.strip_drawables,
+  ]
+
+  # Since android_resources targets like *__all_dfm_resources depend on java
+  # targets that they do not need (in reality it only needs the transitive
+  # resource targets that those java targets depend on), md5_check is used to
+  # prevent outputs from being re-written when real inputs have not changed.
+  md5_check.CallAndWriteDepfileIfStale(lambda: _OnStaleMd5(options),
+                                       options,
+                                       input_paths=input_paths,
+                                       input_strings=input_strings,
+                                       output_paths=output_paths,
+                                       depfile_deps=depfile_deps)
+
+
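+# A minimal sketch of the stale-check idea (hypothetical helper; the real
+# logic lives in util/md5_check.py and additionally writes the ninja depfile):
+# hash the declared inputs and only invoke the expensive callback when the
+# hash differs from the one recorded on the last successful run.
+def _example_call_if_stale(on_stale, input_paths, input_strings, record_path):
+  import hashlib
+  md5 = hashlib.md5()
+  for path in sorted(input_paths):
+    with open(path, 'rb') as f:
+      md5.update(f.read())
+  for value in input_strings:
+    md5.update(repr(value).encode('utf-8'))
+  new_digest = md5.hexdigest()
+  old_digest = None
+  if os.path.exists(record_path):
+    with open(record_path) as f:
+      old_digest = f.read()
+  if new_digest != old_digest:
+    on_stale()  # Only now do the expensive work.
+    with open(record_path, 'w') as f:
+      f.write(new_digest)
+
+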
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/prepare_resources.pydeps b/src/build/android/gyp/prepare_resources.pydeps
new file mode 100644
index 0000000..b225918
--- /dev/null
+++ b/src/build/android/gyp/prepare_resources.pydeps
@@ -0,0 +1,34 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../../print_python_deps.py
+prepare_resources.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/resource_utils.py
+util/resources_parser.py
diff --git a/src/build/android/gyp/process_native_prebuilt.py b/src/build/android/gyp/process_native_prebuilt.py
new file mode 100755
index 0000000..52645d9
--- /dev/null
+++ b/src/build/android/gyp/process_native_prebuilt.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--strip-path', required=True, help='')
+  parser.add_argument('--input-path', required=True, help='')
+  parser.add_argument('--stripped-output-path', required=True, help='')
+  parser.add_argument('--unstripped-output-path', required=True, help='')
+  options = parser.parse_args(args)
+
+  # eu-strip's output keeps the mode of the source file, which might not be
+  # writable, so it can fail to overwrite its own output on the next run.
+  # AtomicOutput avoids the issue.
+  with build_utils.AtomicOutput(options.stripped_output_path) as out:
+    cmd = [
+        options.strip_path,
+        options.input_path,
+        '-o',
+        out.name,
+    ]
+    build_utils.CheckOutput(cmd)
+  shutil.copyfile(options.input_path, options.unstripped_output_path)
+
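+
+# A minimal sketch of the pattern build_utils.AtomicOutput relies on
+# (illustrative only; helper name is hypothetical): write to a temporary file
+# in the destination directory, then atomically rename over the target, so a
+# read-only leftover output never blocks the rewrite.
+def _example_atomic_write(path, data):
+  """Writes |data| (bytes) to |path| via a rename of a sibling temp file."""
+  import tempfile
+  tmp = tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False)
+  try:
+    tmp.write(data)
+    tmp.close()
+    os.replace(tmp.name, path)  # Atomic on POSIX, even over read-only files.
+  finally:
+    if os.path.exists(tmp.name):  # Clean up only if the rename didn't happen.
+      os.unlink(tmp.name)
+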
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/process_native_prebuilt.pydeps b/src/build/android/gyp/process_native_prebuilt.pydeps
new file mode 100644
index 0000000..8e2012a
--- /dev/null
+++ b/src/build/android/gyp/process_native_prebuilt.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/process_native_prebuilt.pydeps build/android/gyp/process_native_prebuilt.py
+../../gn_helpers.py
+process_native_prebuilt.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/proguard.py b/src/build/android/gyp/proguard.py
new file mode 100755
index 0000000..7f59769
--- /dev/null
+++ b/src/build/android/gyp/proguard.py
@@ -0,0 +1,722 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+from collections import defaultdict
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import dex
+import dex_jdk_libs
+from pylib.dex import dex_parser
+from util import build_utils
+from util import diff_utils
+
+_API_LEVEL_VERSION_CODE = [
+    (21, 'L'),
+    (22, 'LollipopMR1'),
+    (23, 'M'),
+    (24, 'N'),
+    (25, 'NMR1'),
+    (26, 'O'),
+    (27, 'OMR1'),
+    (28, 'P'),
+    (29, 'Q'),
+    (30, 'R'),
+    (31, 'S'),
+]
+
+
+def _ParseOptions():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--r8-path',
+                      required=True,
+                      help='Path to the R8.jar to use.')
+  parser.add_argument(
+      '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+  parser.add_argument('--input-paths',
+                      action='append',
+                      required=True,
+                      help='GN-list of .jar files to optimize.')
+  parser.add_argument('--desugar-jdk-libs-jar',
+                      help='Path to desugar_jdk_libs.jar.')
+  parser.add_argument('--desugar-jdk-libs-configuration-jar',
+                      help='Path to desugar_jdk_libs_configuration.jar.')
+  parser.add_argument('--output-path', help='Path to the generated .jar file.')
+  parser.add_argument(
+      '--proguard-configs',
+      action='append',
+      required=True,
+      help='GN-list of configuration files.')
+  parser.add_argument(
+      '--apply-mapping', help='Path to ProGuard mapping to apply.')
+  parser.add_argument(
+      '--mapping-output',
+      required=True,
+      help='Path for ProGuard to output mapping file to.')
+  parser.add_argument(
+      '--extra-mapping-output-paths',
+      help='GN-list of additional paths to copy output mapping file to.')
+  parser.add_argument(
+      '--classpath',
+      action='append',
+      help='GN-list of .jar files to include as libraries.')
+  parser.add_argument('--main-dex-rules-path',
+                      action='append',
+                      help='Path to main dex rules for multidex.')
+  parser.add_argument(
+      '--min-api', help='Minimum Android API level compatibility.')
+  parser.add_argument('--enable-obfuscation',
+                      action='store_true',
+                      help='Minify symbol names')
+  parser.add_argument(
+      '--verbose', '-v', action='store_true', help='Print all ProGuard output')
+  parser.add_argument(
+      '--repackage-classes',
+      help='Package that all optimized classes are put in.')
+  parser.add_argument(
+      '--disable-outlining',
+      action='store_true',
+      help='Disable the outlining optimization provided by R8.')
+  parser.add_argument(
+      '--disable-checks',
+      action='store_true',
+      help='Disable -checkdiscard directives and the missing-symbols check.')
+  parser.add_argument('--sourcefile', help='Value for source file attribute')
+  parser.add_argument(
+      '--force-enable-assertions',
+      action='store_true',
+      help='Forcefully enable javac-generated assertion code.')
+  parser.add_argument(
+      '--feature-jars',
+      action='append',
+      help='GN-list of paths to jars which comprise the corresponding '
+      'feature.')
+  parser.add_argument(
+      '--dex-dest',
+      action='append',
+      dest='dex_dests',
+      help='Destination for dex file of the corresponding feature.')
+  parser.add_argument(
+      '--feature-name',
+      action='append',
+      dest='feature_names',
+      help='The name of the feature module.')
+  parser.add_argument(
+      '--uses-split',
+      action='append',
+      help='List of name pairs, each separated by a colon, mapping a feature '
+      'module to the feature module it depends on.')
+  parser.add_argument(
+      '--keep-rules-targets-regex',
+      metavar='KEEP_RULES_REGEX',
+      help='If passed, outputs keep rules for references from all other '
+      'inputs to the subset of inputs that match KEEP_RULES_REGEX.')
+  parser.add_argument(
+      '--keep-rules-output-path',
+      help='Output path to the keep rules for references to the '
+      '--keep-rules-targets-regex inputs from the rest of the inputs.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--show-desugar-default-interface-warnings',
+                      action='store_true',
+                      help='Enable desugaring warnings.')
+  parser.add_argument('--dump-inputs',
+                      action='store_true',
+                      help='Use when filing R8 bugs to capture inputs.'
+                      ' Stores inputs to r8inputs.zip')
+  parser.add_argument(
+      '--stamp',
+      help='File to touch upon success. Mutually exclusive with --output-path')
+  parser.add_argument('--desugared-library-keep-rule-output',
+                      help='Path to desugared library keep rule output file.')
+
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+
+  if options.feature_names:
+    if options.output_path:
+      parser.error('Feature splits cannot specify an output in GN.')
+    if not options.actual_file and not options.stamp:
+      parser.error('Feature splits require a stamp file as output.')
+  elif not options.output_path:
+    parser.error('Output path required when feature splits aren\'t used')
+
+  if bool(options.keep_rules_targets_regex) != bool(
+      options.keep_rules_output_path):
+    raise Exception('You must pass both --keep-rules-targets-regex and '
+                    '--keep-rules-output-path')
+
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+  options.input_paths = build_utils.ParseGnList(options.input_paths)
+  options.extra_mapping_output_paths = build_utils.ParseGnList(
+      options.extra_mapping_output_paths)
+
+  if options.feature_names:
+    if 'base' not in options.feature_names:
+      parser.error('"base" feature required when feature arguments are used.')
+    if len(options.feature_names) != len(options.feature_jars) or len(
+        options.feature_names) != len(options.dex_dests):
+      parser.error('Invalid feature argument lengths.')
+
+    options.feature_jars = [
+        build_utils.ParseGnList(x) for x in options.feature_jars
+    ]
+
+  split_map = {}
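+  # e.g. --uses-split vr:base (hypothetical names) records {'vr': 'base'},
+  # i.e. child -> parent.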
+  if options.uses_split:
+    for split_pair in options.uses_split:
+      child, parent = split_pair.split(':')
+      for name in (child, parent):
+        if name not in options.feature_names:
+          parser.error('"%s" referenced in --uses-split not present.' % name)
+      split_map[child] = parent
+  options.uses_split = split_map
+
+  return options
+
+
+class _SplitContext(object):
+  def __init__(self, name, output_path, input_jars, work_dir, parent_name=None):
+    self.name = name
+    self.parent_name = parent_name
+    self.input_jars = set(input_jars)
+    self.final_output_path = output_path
+    self.staging_dir = os.path.join(work_dir, name)
+    os.mkdir(self.staging_dir)
+
+  def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
+    found_files = build_utils.FindInDirectory(self.staging_dir)
+    if not found_files:
+      raise Exception('Missing dex outputs in {}'.format(self.staging_dir))
+
+    if self.final_output_path.endswith('.dex'):
+      if has_imported_lib:
+        raise Exception(
+            'Trying to create a single .dex file, but a dependency requires '
+            'JDK Library Desugaring (which necessitates a second file). '
+            'Refer to %s to see what desugaring was required' %
+            keep_rule_output)
+      if len(found_files) != 1:
+        raise Exception('Expected exactly 1 dex file output, found: {}'.format(
+            '\t'.join(found_files)))
+      shutil.move(found_files[0], self.final_output_path)
+      return
+
+    # Add to .jar using Python rather than having R8 output to a .zip directly
+    # in order to disable compression of the .jar, saving ~500ms.
+    tmp_jar_output = self.staging_dir + '.jar'
+    build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
+    shutil.move(tmp_jar_output, self.final_output_path)
+
+
+def _DeDupeInputJars(split_contexts_by_name):
+  """Moves jars used by multiple splits into common ancestors.
+
+  Updates |input_jars| for each _SplitContext.
+  """
+
+  def count_ancestors(split_context):
+    ret = 0
+    if split_context.parent_name:
+      ret += 1
+      ret += count_ancestors(split_contexts_by_name[split_context.parent_name])
+    return ret
+
+  base_context = split_contexts_by_name['base']
+  # Sort by tree depth to ensure children are visited before their parents.
+  sorted_contexts = list(split_contexts_by_name.values())
+  sorted_contexts.remove(base_context)
+  sorted_contexts.sort(key=count_ancestors, reverse=True)
+
+  # If a jar is present in multiple siblings, promote it to their parent.
+  seen_jars_by_parent = defaultdict(set)
+  for split_context in sorted_contexts:
+    seen_jars = seen_jars_by_parent[split_context.parent_name]
+    new_dupes = seen_jars.intersection(split_context.input_jars)
+    parent_context = split_contexts_by_name[split_context.parent_name]
+    parent_context.input_jars.update(new_dupes)
+    seen_jars.update(split_context.input_jars)
+
+  def ancestor_jars(parent_name, dest=None):
+    dest = dest or set()
+    if not parent_name:
+      return dest
+    parent_context = split_contexts_by_name[parent_name]
+    dest.update(parent_context.input_jars)
+    return ancestor_jars(parent_context.parent_name, dest)
+
+  # Now that jars have been moved up the tree, remove those that appear in
+  # ancestors.
+  for split_context in sorted_contexts:
+    split_context.input_jars -= ancestor_jars(split_context.parent_name)
+
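+
+# Illustrative sketch only: _FakeSplitContext is a hypothetical stand-in for
+# _SplitContext (which requires a real work dir), showing the promotion rule
+# above. A jar shared by two sibling features ends up in their parent:
+#   base = _FakeSplitContext('base', None, {'base.jar'})
+#   a = _FakeSplitContext('a', 'base', {'shared.jar', 'a.jar'})
+#   b = _FakeSplitContext('b', 'base', {'shared.jar', 'b.jar'})
+#   _DeDupeInputJars({c.name: c for c in (base, a, b)})
+#   # => base: {'base.jar', 'shared.jar'}, a: {'a.jar'}, b: {'b.jar'}
+class _FakeSplitContext(object):
+  def __init__(self, name, parent_name, input_jars):
+    self.name = name
+    self.parent_name = parent_name
+    self.input_jars = set(input_jars)
+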
+
+def _OptimizeWithR8(options,
+                    config_paths,
+                    libraries,
+                    dynamic_config_data,
+                    print_stdout=False):
+  with build_utils.TempDir() as tmp_dir:
+    if dynamic_config_data:
+      dynamic_config_path = os.path.join(tmp_dir, 'dynamic_config.flags')
+      with open(dynamic_config_path, 'w') as f:
+        f.write(dynamic_config_data)
+      config_paths = config_paths + [dynamic_config_path]
+
+    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
+    # If there is no output (no classes are kept), this prevents this script
+    # from failing.
+    build_utils.Touch(tmp_mapping_path)
+
+    tmp_output = os.path.join(tmp_dir, 'r8out')
+    os.mkdir(tmp_output)
+
+    split_contexts_by_name = {}
+    if options.feature_names:
+      for name, dest_dex, input_jars in zip(options.feature_names,
+                                            options.dex_dests,
+                                            options.feature_jars):
+        parent_name = options.uses_split.get(name)
+        if parent_name is None and name != 'base':
+          parent_name = 'base'
+        split_context = _SplitContext(name,
+                                      dest_dex,
+                                      input_jars,
+                                      tmp_output,
+                                      parent_name=parent_name)
+        split_contexts_by_name[name] = split_context
+    else:
+      # Base context will get populated via "extra_jars" below.
+      split_contexts_by_name['base'] = _SplitContext('base',
+                                                     options.output_path, [],
+                                                     tmp_output)
+    base_context = split_contexts_by_name['base']
+
+    # R8 OOMs with the default xmx=1G.
+    cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [
+        '-Dcom.android.tools.r8.allowTestProguardOptions=1',
+        '-Dcom.android.tools.r8.verticalClassMerging=1',
+        '-Dcom.android.tools.r8.disableHorizontalClassMerging=1',
+    ]
+    if options.disable_outlining:
+      cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
+    if options.dump_inputs:
+      cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
+    cmd += [
+        '-cp',
+        options.r8_path,
+        'com.android.tools.r8.R8',
+        '--no-data-resources',
+        '--output',
+        base_context.staging_dir,
+        '--pg-map-output',
+        tmp_mapping_path,
+    ]
+
+    if options.disable_checks:
+      # Info level priority logs are not printed by default.
+      cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info']
+
+    if options.desugar_jdk_libs_json:
+      cmd += [
+          '--desugared-lib',
+          options.desugar_jdk_libs_json,
+          '--desugared-lib-pg-conf-output',
+          options.desugared_library_keep_rule_output,
+      ]
+
+    if options.min_api:
+      cmd += ['--min-api', options.min_api]
+
+    if options.force_enable_assertions:
+      cmd += ['--force-enable-assertions']
+
+    for lib in libraries:
+      cmd += ['--lib', lib]
+
+    for config_file in config_paths:
+      cmd += ['--pg-conf', config_file]
+
+    if options.main_dex_rules_path:
+      for main_dex_rule in options.main_dex_rules_path:
+        cmd += ['--main-dex-rules', main_dex_rule]
+
+    _DeDupeInputJars(split_contexts_by_name)
+
+    # Add any extra inputs to the base context (e.g. desugar runtime).
+    extra_jars = set(options.input_paths)
+    for split_context in split_contexts_by_name.values():
+      extra_jars -= split_context.input_jars
+    base_context.input_jars.update(extra_jars)
+
+    for split_context in split_contexts_by_name.values():
+      if split_context is base_context:
+        continue
+      for in_jar in sorted(split_context.input_jars):
+        cmd += ['--feature', in_jar, split_context.staging_dir]
+
+    cmd += sorted(base_context.input_jars)
+
+    try:
+      stderr_filter = dex.CreateStderrFilter(
+          options.show_desugar_default_interface_warnings)
+      logging.debug('Running R8')
+      build_utils.CheckOutput(cmd,
+                              print_stdout=print_stdout,
+                              stderr_filter=stderr_filter,
+                              fail_on_output=options.warnings_as_errors)
+    except build_utils.CalledProcessError as err:
+      debugging_link = ('\n\nR8 failed. Please see {}.'.format(
+          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
+          'android/docs/java_optimization.md#Debugging-common-failures\n'))
+      raise build_utils.CalledProcessError(err.cwd, err.args,
+                                           err.output + debugging_link)
+
+    base_has_imported_lib = False
+    if options.desugar_jdk_libs_json:
+      logging.debug('Running L8')
+      existing_files = build_utils.FindInDirectory(base_context.staging_dir)
+      jdk_dex_output = os.path.join(base_context.staging_dir,
+                                    'classes%d.dex' % (len(existing_files) + 1))
+      # Use -applymapping to avoid name collisions.
+      l8_dynamic_config_path = os.path.join(tmp_dir, 'l8_dynamic_config.flags')
+      with open(l8_dynamic_config_path, 'w') as f:
+        f.write("-applymapping '{}'\n".format(tmp_mapping_path))
+      # Pass the dynamic config so that obfuscation options are picked up.
+      l8_config_paths = [dynamic_config_path, l8_dynamic_config_path]
+      if os.path.exists(options.desugared_library_keep_rule_output):
+        l8_config_paths.append(options.desugared_library_keep_rule_output)
+
+      base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
+          options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+          options.desugar_jdk_libs_jar,
+          options.desugar_jdk_libs_configuration_jar, jdk_dex_output,
+          options.warnings_as_errors, l8_config_paths)
+      if int(options.min_api) >= 24 and base_has_imported_lib:
+        with open(jdk_dex_output, 'rb') as f:
+          dexfile = dex_parser.DexFile(bytearray(f.read()))
+          for m in dexfile.IterMethodSignatureParts():
+            print('{}#{}'.format(m[0], m[2]))
+        assert False, (
+            'Desugared JDK libs are disabled on Monochrome and newer - see '
+            'crbug.com/1159984 for details, and see above list for desugared '
+            'classes and methods.')
+
+    logging.debug('Collecting outputs')
+    base_context.CreateOutput(base_has_imported_lib,
+                              options.desugared_library_keep_rule_output)
+    for split_context in split_contexts_by_name.values():
+      if split_context is not base_context:
+        split_context.CreateOutput()
+
+    with open(options.mapping_output, 'w') as out_file, \
+        open(tmp_mapping_path) as in_file:
+      # Mapping files generated by R8 include comments that may break
+      # some of our tooling so remove those (specifically: apkanalyzer).
+      out_file.writelines(l for l in in_file if not l.startswith('#'))
+  return base_context
+
+
+def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string,
+                     keep_rules_output):
+  cmd = build_utils.JavaCmd(False) + [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--keep-rules', '--output', keep_rules_output
+  ]
+  targets_re = re.compile(targets_re_string)
+  for path in input_paths:
+    if targets_re.search(path):
+      cmd += ['--target', path]
+    else:
+      cmd += ['--source', path]
+  for path in classpath:
+    cmd += ['--lib', path]
+
+  build_utils.CheckOutput(cmd, print_stderr=False, fail_on_output=False)
+
+
+def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
+                            error_title):
+  cmd = build_utils.JavaCmd(warnings_as_errors) + [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--check'
+  ]
+
+  for path in classpath:
+    cmd += ['--lib', path]
+  for path in dex_files:
+    cmd += ['--source', path]
+
+  def stderr_filter(stderr):
+    ignored_lines = [
+        # Summary contains warning count, which our filtering makes wrong.
+        'Warning: Tracereferences found',
+
+        # TODO(agrieve): Create interface jars for these missing classes rather
+        #     than allowlisting here.
+        'dalvik/system',
+        'libcore/io',
+        'sun/misc/Unsafe',
+
+        # Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper
+        ('android/text/StaticLayout;<init>(Ljava/lang/CharSequence;IILandroid'
+         '/text/TextPaint;ILandroid/text/Layout$Alignment;Landroid/text/'
+         'TextDirectionHeuristic;FFZLandroid/text/TextUtils$TruncateAt;II)V'),
+
+        # Found in
+        # com/google/android/gms/cast/framework/media/internal/ResourceProvider
+        # Missing due to setting "strip_resources = true".
+        'com/google/android/gms/cast/framework/R',
+
+        # Found in com/google/android/gms/common/GoogleApiAvailability
+        # Missing due to setting "strip_drawables = true".
+        'com/google/android/gms/base/R$drawable',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Flogger's
+        # PlatformProvider.
+        'com/google/common/flogger/backend/google/GooglePlatform',
+        'com/google/common/flogger/backend/system/DefaultPlatform',
+
+        # trichrome_webview_google_bundle contains this missing reference.
+        # TODO(crbug.com/1142530): Fix this missing reference properly.
+        'org/chromium/build/NativeLibraries',
+
+        # TODO(agrieve): Exclude these only when use_jacoco_coverage=true.
+        'Ljava/lang/instrument/ClassFileTransformer',
+        'Ljava/lang/instrument/IllegalClassFormatException',
+        'Ljava/lang/instrument/Instrumentation',
+        'Ljava/lang/management/ManagementFactory',
+        'Ljavax/management/MBeanServer',
+        'Ljavax/management/ObjectInstance',
+        'Ljavax/management/ObjectName',
+        'Ljavax/management/StandardMBean',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Firebase's
+        # KotlinDetector: com.google.firebase.platforminfo.KotlinDetector.
+        'Lkotlin/KotlinVersion',
+    ]
+
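+    # TraceReferences indents each missing definition by two spaces under a
+    # heading line, so a two-space indent is a proxy for "items are present".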
+    had_unfiltered_items = '  ' in stderr
+    stderr = build_utils.FilterLines(
+        stderr, '|'.join(re.escape(x) for x in ignored_lines))
+    if stderr:
+      if '  ' in stderr:
+        stderr = error_title + """
+Tip: Build with:
+        is_java_debug=false
+        treat_warnings_as_errors=false
+        enable_proguard_obfuscation=false
+     and then use dexdump to see which class(es) reference them.
+
+     E.g.:
+       third_party/android_sdk/public/build-tools/*/dexdump -d \
+out/Release/apks/YourApk.apk > dex.txt
+""" + stderr
+
+        if 'FragmentActivity' in stderr:
+          stderr += """
+You may need to update build configs to run FragmentActivityReplacer for
+additional targets. See
+https://chromium.googlesource.com/chromium/src.git/+/master/docs/ui/android/bytecode_rewriting.md.
+"""
+      elif had_unfiltered_items:
+        # Left only with empty headings. All indented items filtered out.
+        stderr = ''
+    return stderr
+
+  logging.debug('cmd: %s', ' '.join(cmd))
+  build_utils.CheckOutput(cmd,
+                          print_stdout=True,
+                          stderr_filter=stderr_filter,
+                          fail_on_output=warnings_as_errors)
+
+
+def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False):
+  ret = []
+
+  # Sort in this way so //clank versions of the same libraries will sort
+  # to the same spot in the file.
+  def sort_key(path):
+    return tuple(reversed(path.split(os.path.sep)))
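+  # e.g. sort_key('../../clank/foo/proguard.flags') ->
+  #     ('proguard.flags', 'foo', 'clank', '..', '..')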
+
+  for config in sorted(configs, key=sort_key):
+    if exclude_generated and config.endswith('.resources.proguard.txt'):
+      continue
+
+    with open(config) as config_file:
+      contents = config_file.read().rstrip()
+
+    if not contents.strip():
+      # Ignore empty files.
+      continue
+
+    # Fix up line endings (third_party configs can have windows endings).
+    contents = contents.replace('\r', '')
+    # Remove numbers from generated rule comments to make file more
+    # diff'able.
+    contents = re.sub(r' #generated:\d+', '', contents)
+    ret.append('# File: ' + config)
+    ret.append(contents)
+    ret.append('')
+
+  if dynamic_config_data:
+    ret.append('# File: //build/android/gyp/proguard.py (generated rules)')
+    ret.append(dynamic_config_data)
+    ret.append('')
+  return '\n'.join(ret)
+
+
+def _CreateDynamicConfig(options):
+  # Our scripts already fail on output. Adding -ignorewarnings makes R8 output
+  # warnings rather than throw exceptions so we can selectively ignore them via
+  # dex.py's ignore list. Context: https://crbug.com/1180222
+  ret = ["-ignorewarnings"]
+
+  if options.sourcefile:
+    ret.append("-renamesourcefileattribute '%s' # OMIT FROM EXPECTATIONS" %
+               options.sourcefile)
+
+  if options.enable_obfuscation:
+    ret.append("-repackageclasses ''")
+  else:
+    ret.append("-dontobfuscate")
+
+  if options.apply_mapping:
+    ret.append("-applymapping '%s'" % options.apply_mapping)
+
+  _min_api = int(options.min_api) if options.min_api else 0
+  for api_level, version_code in _API_LEVEL_VERSION_CODE:
+    annotation_name = 'org.chromium.base.annotations.VerifiesOn' + version_code
+    if api_level > _min_api:
+      ret.append('-keep @interface %s' % annotation_name)
+      ret.append("""\
+-if @%s class * {
+    *** *(...);
+}
+-keep,allowobfuscation class <1> {
+    *** <2>(...);
+}""" % annotation_name)
+      ret.append("""\
+-keepclassmembers,allowobfuscation class ** {
+  @%s <methods>;
+}""" % annotation_name)
+  return '\n'.join(ret)
+
+
+def _VerifyNoEmbeddedConfigs(jar_paths):
+  failed = False
+  for jar_path in jar_paths:
+    with zipfile.ZipFile(jar_path) as z:
+      for name in z.namelist():
+        if name.startswith('META-INF/proguard/'):
+          failed = True
+          sys.stderr.write("""\
+Found embedded proguard config within {}.
+Embedded configs are not permitted (https://crbug.com/989505)
+""".format(jar_path))
+          break
+  if failed:
+    sys.exit(1)
+
+
+def _ContainsDebuggingConfig(config_str):
+  debugging_configs = ('-whyareyoukeeping', '-whyareyounotinlining')
+  return any(config in config_str for config in debugging_configs)
+
+
+def _MaybeWriteStampAndDepFile(options, inputs):
+  output = options.output_path
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+    output = options.stamp
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, output, inputs=inputs)
+
+
+def main():
+  build_utils.InitLogging('PROGUARD_DEBUG')
+  options = _ParseOptions()
+
+  logging.debug('Preparing configs')
+  proguard_configs = options.proguard_configs
+
+  # ProGuard configs that are derived from flags.
+  dynamic_config_data = _CreateDynamicConfig(options)
+
+  # Merge all configs (used for expectation diffing and to detect debugging
+  # directives below).
+  merged_configs = _CombineConfigs(
+      proguard_configs, dynamic_config_data, exclude_generated=True)
+  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose
+
+  if options.expected_file:
+    diff_utils.CheckExpectations(merged_configs, options)
+    if options.only_verify_expectations:
+      build_utils.WriteDepfile(options.depfile,
+                               options.actual_file,
+                               inputs=options.proguard_configs)
+      return
+
+  logging.debug('Looking for embedded configs')
+  libraries = []
+  for p in options.classpath:
+    # TODO(bjoyce): Remove filter once old android support libraries are gone.
+    # Works around a "library class extends program class" dependency problem.
+    if 'com_android_support' in p or 'android_support_test' in p:
+      continue
+    # If a jar is among the inputs, there is no need to also pass it as a
+    # library jar.
+    if p not in libraries and p not in options.input_paths:
+      libraries.append(p)
+  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)
+  if options.keep_rules_output_path:
+    _OutputKeepRules(options.r8_path, options.input_paths, options.classpath,
+                     options.keep_rules_targets_regex,
+                     options.keep_rules_output_path)
+    return
+
+  base_context = _OptimizeWithR8(options, proguard_configs, libraries,
+                                 dynamic_config_data, print_stdout)
+
+  if not options.disable_checks:
+    logging.debug('Running tracereferences')
+    all_dex_files = []
+    if options.output_path:
+      all_dex_files.append(options.output_path)
+    if options.dex_dests:
+      all_dex_files.extend(options.dex_dests)
+    error_title = 'DEX contains references to non-existent symbols after R8.'
+    _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath,
+                            options.warnings_as_errors, error_title)
+    # Also ensure that base module doesn't have any references to child dex
+    # symbols.
+    # TODO(agrieve): Remove this check once r8 desugaring is fixed to not put
+    #     synthesized classes in the base module.
+    error_title = 'Base module DEX contains references to symbols within DFMs.'
+    _CheckForMissingSymbols(options.r8_path, [base_context.final_output_path],
+                            options.classpath, options.warnings_as_errors,
+                            error_title)
+
+  for output in options.extra_mapping_output_paths:
+    shutil.copy(options.mapping_output, output)
+
+  inputs = options.proguard_configs + options.input_paths + libraries
+  if options.apply_mapping:
+    inputs.append(options.apply_mapping)
+
+  _MaybeWriteStampAndDepFile(options, inputs)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/proguard.pydeps b/src/build/android/gyp/proguard.pydeps
new file mode 100644
index 0000000..c1de73b
--- /dev/null
+++ b/src/build/android/gyp/proguard.pydeps
@@ -0,0 +1,16 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../gn_helpers.py
+../../print_python_deps.py
+../convert_dex_profile.py
+../pylib/__init__.py
+../pylib/dex/__init__.py
+../pylib/dex/dex_parser.py
+dex.py
+dex_jdk_libs.py
+proguard.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
+util/zipalign.py
diff --git a/src/build/android/gyp/proto/Configuration_pb2.py b/src/build/android/gyp/proto/Configuration_pb2.py
new file mode 100644
index 0000000..8591830
--- /dev/null
+++ b/src/build/android/gyp/proto/Configuration_pb2.py
@@ -0,0 +1,697 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: frameworks/base/tools/aapt2/Configuration.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='frameworks/base/tools/aapt2/Configuration.proto',
+  package='aapt.pb',
+  syntax='proto3',
+  serialized_options=_b('\n\020com.android.aapt'),
+  serialized_pb=_b('\n/frameworks/base/tools/aapt2/Configuration.proto\x12\x07\x61\x61pt.pb\"\xd9\x14\n\rConfiguration\x12\x0b\n\x03mcc\x18\x01 \x01(\r\x12\x0b\n\x03mnc\x18\x02 \x01(\r\x12\x0e\n\x06locale\x18\x03 \x01(\t\x12@\n\x10layout_direction\x18\x04 \x01(\x0e\x32&.aapt.pb.Configuration.LayoutDirection\x12\x14\n\x0cscreen_width\x18\x05 \x01(\r\x12\x15\n\rscreen_height\x18\x06 \x01(\r\x12\x17\n\x0fscreen_width_dp\x18\x07 \x01(\r\x12\x18\n\x10screen_height_dp\x18\x08 \x01(\r\x12 \n\x18smallest_screen_width_dp\x18\t \x01(\r\x12\x43\n\x12screen_layout_size\x18\n \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutSize\x12\x43\n\x12screen_layout_long\x18\x0b \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutLong\x12\x38\n\x0cscreen_round\x18\x0c \x01(\x0e\x32\".aapt.pb.Configuration.ScreenRound\x12?\n\x10wide_color_gamut\x18\r \x01(\x0e\x32%.aapt.pb.Configuration.WideColorGamut\x12\'\n\x03hdr\x18\x0e \x01(\x0e\x32\x1a.aapt.pb.Configuration.Hdr\x12\x37\n\x0borientation\x18\x0f \x01(\x0e\x32\".aapt.pb.Configuration.Orientation\x12\x37\n\x0cui_mode_type\x18\x10 \x01(\x0e\x32!.aapt.pb.Configuration.UiModeType\x12\x39\n\rui_mode_night\x18\x11 \x01(\x0e\x32\".aapt.pb.Configuration.UiModeNight\x12\x0f\n\x07\x64\x65nsity\x18\x12 \x01(\r\x12\x37\n\x0btouchscreen\x18\x13 \x01(\x0e\x32\".aapt.pb.Configuration.Touchscreen\x12\x36\n\x0bkeys_hidden\x18\x14 \x01(\x0e\x32!.aapt.pb.Configuration.KeysHidden\x12\x31\n\x08keyboard\x18\x15 \x01(\x0e\x32\x1f.aapt.pb.Configuration.Keyboard\x12\x34\n\nnav_hidden\x18\x16 \x01(\x0e\x32 .aapt.pb.Configuration.NavHidden\x12\x35\n\nnavigation\x18\x17 \x01(\x0e\x32!.aapt.pb.Configuration.Navigation\x12\x13\n\x0bsdk_version\x18\x18 \x01(\r\x12\x0f\n\x07product\x18\x19 \x01(\t\"a\n\x0fLayoutDirection\x12\x1a\n\x16LAYOUT_DIRECTION_UNSET\x10\x00\x12\x18\n\x14LAYOUT_DIRECTION_LTR\x10\x01\x12\x18\n\x14LAYOUT_DIRECTION_RTL\x10\x02\"\xaa\x01\n\x10ScreenLayoutSize\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_UNSET\x10\x00\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_SMALL\x10\x01\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_NORMAL\x10\x02\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_LARGE\x10\x03\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_XLARGE\x10\x04\"m\n\x10ScreenLayoutLong\x12\x1c\n\x18SCREEN_LAYOUT_LONG_UNSET\x10\x00\x12\x1b\n\x17SCREEN_LAYOUT_LONG_LONG\x10\x01\x12\x1e\n\x1aSCREEN_LAYOUT_LONG_NOTLONG\x10\x02\"X\n\x0bScreenRound\x12\x16\n\x12SCREEN_ROUND_UNSET\x10\x00\x12\x16\n\x12SCREEN_ROUND_ROUND\x10\x01\x12\x19\n\x15SCREEN_ROUND_NOTROUND\x10\x02\"h\n\x0eWideColorGamut\x12\x1a\n\x16WIDE_COLOR_GAMUT_UNSET\x10\x00\x12\x1b\n\x17WIDE_COLOR_GAMUT_WIDECG\x10\x01\x12\x1d\n\x19WIDE_COLOR_GAMUT_NOWIDECG\x10\x02\"3\n\x03Hdr\x12\r\n\tHDR_UNSET\x10\x00\x12\x0e\n\nHDR_HIGHDR\x10\x01\x12\r\n\tHDR_LOWDR\x10\x02\"h\n\x0bOrientation\x12\x15\n\x11ORIENTATION_UNSET\x10\x00\x12\x14\n\x10ORIENTATION_PORT\x10\x01\x12\x14\n\x10ORIENTATION_LAND\x10\x02\x12\x16\n\x12ORIENTATION_SQUARE\x10\x03\"\xd7\x01\n\nUiModeType\x12\x16\n\x12UI_MODE_TYPE_UNSET\x10\x00\x12\x17\n\x13UI_MODE_TYPE_NORMAL\x10\x01\x12\x15\n\x11UI_MODE_TYPE_DESK\x10\x02\x12\x14\n\x10UI_MODE_TYPE_CAR\x10\x03\x12\x1b\n\x17UI_MODE_TYPE_TELEVISION\x10\x04\x12\x1a\n\x16UI_MODE_TYPE_APPLIANCE\x10\x05\x12\x16\n\x12UI_MODE_TYPE_WATCH\x10\x06\x12\x1a\n\x16UI_MODE_TYPE_VRHEADSET\x10\x07\"[\n\x0bUiModeNight\x12\x17\n\x13UI_MODE_NIGHT_UNSET\x10\x00\x12\x17\n\x13UI_MODE_NIGHT_NIGHT\x10\x01\x12\x1a\n\x16UI_MODE_NIGHT_NOTNIGHT\x10\x02\"m\n\x0bTouchscreen\x12\x15\n\x11TOUCHSCREEN_UNSET\x10\x00\x12\x17\n\x13TOUCHSCREEN_NOTOUCH\x10\x01\x12\x16\n\x12TOUCHSCREEN_STYLUS\x10\x02\x12\x16\n\x12TOUCHSCREEN_FINGER\x10\x03\"v\n\nKeysHidden\x12\x15\n\x11KEYS_HIDDEN_UNSET\x10\x00\x12\x1b\n\x17KEYS_HIDDEN_KEYSEXPOSED\x10\x01\x12\x1a\n\x16KEYS_HIDDEN_KEYSHIDDEN\x10\x02\x12\x18\n\x14KEYS_HIDDEN_KEYSSOFT\x10\x03\"`\n\x08Keyboard\x12\x12\n\x0eKEYBOARD_UNSET\x10\x00\x12\x13\n\x0fKEYBOARD_NOKEYS\x10\x01\x12\x13\n\x0fKEYBOARD_QWERTY\x10\x02\x12\x16\n\x12KEYBOARD_TWELVEKEY\x10\x03\"V\n\tNavHidden\x12\x14\n\x10NAV_HIDDEN_UNSET\x10\x00\x12\x19\n\x15NAV_HIDDEN_NAVEXPOSED\x10\x01\x12\x18\n\x14NAV_HIDDEN_NAVHIDDEN\x10\x02\"}\n\nNavigation\x12\x14\n\x10NAVIGATION_UNSET\x10\x00\x12\x14\n\x10NAVIGATION_NONAV\x10\x01\x12\x13\n\x0fNAVIGATION_DPAD\x10\x02\x12\x18\n\x14NAVIGATION_TRACKBALL\x10\x03\x12\x14\n\x10NAVIGATION_WHEEL\x10\x04\x42\x12\n\x10\x63om.android.aaptb\x06proto3')
+)
+
+
+
+_CONFIGURATION_LAYOUTDIRECTION = _descriptor.EnumDescriptor(
+  name='LayoutDirection',
+  full_name='aapt.pb.Configuration.LayoutDirection',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_LTR', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_RTL', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1119,
+  serialized_end=1216,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_LAYOUTDIRECTION)
+
+_CONFIGURATION_SCREENLAYOUTSIZE = _descriptor.EnumDescriptor(
+  name='ScreenLayoutSize',
+  full_name='aapt.pb.Configuration.ScreenLayoutSize',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_SMALL', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_NORMAL', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_LARGE', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_XLARGE', index=4, number=4,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1219,
+  serialized_end=1389,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTSIZE)
+
+_CONFIGURATION_SCREENLAYOUTLONG = _descriptor.EnumDescriptor(
+  name='ScreenLayoutLong',
+  full_name='aapt.pb.Configuration.ScreenLayoutLong',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_LONG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_NOTLONG', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1391,
+  serialized_end=1500,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTLONG)
+
+_CONFIGURATION_SCREENROUND = _descriptor.EnumDescriptor(
+  name='ScreenRound',
+  full_name='aapt.pb.Configuration.ScreenRound',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_ROUND', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_NOTROUND', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1502,
+  serialized_end=1590,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENROUND)
+
+_CONFIGURATION_WIDECOLORGAMUT = _descriptor.EnumDescriptor(
+  name='WideColorGamut',
+  full_name='aapt.pb.Configuration.WideColorGamut',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_WIDECG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_NOWIDECG', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1592,
+  serialized_end=1696,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_WIDECOLORGAMUT)
+
+_CONFIGURATION_HDR = _descriptor.EnumDescriptor(
+  name='Hdr',
+  full_name='aapt.pb.Configuration.Hdr',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='HDR_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='HDR_HIGHDR', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='HDR_LOWDR', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1698,
+  serialized_end=1749,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_HDR)
+
+_CONFIGURATION_ORIENTATION = _descriptor.EnumDescriptor(
+  name='Orientation',
+  full_name='aapt.pb.Configuration.Orientation',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_PORT', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_LAND', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_SQUARE', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1751,
+  serialized_end=1855,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_ORIENTATION)
+
+_CONFIGURATION_UIMODETYPE = _descriptor.EnumDescriptor(
+  name='UiModeType',
+  full_name='aapt.pb.Configuration.UiModeType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_NORMAL', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_DESK', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_CAR', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_TELEVISION', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_APPLIANCE', index=5, number=5,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_WATCH', index=6, number=6,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_VRHEADSET', index=7, number=7,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1858,
+  serialized_end=2073,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODETYPE)
+
+_CONFIGURATION_UIMODENIGHT = _descriptor.EnumDescriptor(
+  name='UiModeNight',
+  full_name='aapt.pb.Configuration.UiModeNight',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_NIGHT', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_NOTNIGHT', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2075,
+  serialized_end=2166,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODENIGHT)
+
+_CONFIGURATION_TOUCHSCREEN = _descriptor.EnumDescriptor(
+  name='Touchscreen',
+  full_name='aapt.pb.Configuration.Touchscreen',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_NOTOUCH', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_STYLUS', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_FINGER', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2168,
+  serialized_end=2277,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_TOUCHSCREEN)
+
+_CONFIGURATION_KEYSHIDDEN = _descriptor.EnumDescriptor(
+  name='KeysHidden',
+  full_name='aapt.pb.Configuration.KeysHidden',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSEXPOSED', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSHIDDEN', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSSOFT', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2279,
+  serialized_end=2397,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYSHIDDEN)
+
+_CONFIGURATION_KEYBOARD = _descriptor.EnumDescriptor(
+  name='Keyboard',
+  full_name='aapt.pb.Configuration.Keyboard',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_NOKEYS', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_QWERTY', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_TWELVEKEY', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2399,
+  serialized_end=2495,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYBOARD)
+
+_CONFIGURATION_NAVHIDDEN = _descriptor.EnumDescriptor(
+  name='NavHidden',
+  full_name='aapt.pb.Configuration.NavHidden',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_NAVEXPOSED', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_NAVHIDDEN', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2497,
+  serialized_end=2583,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVHIDDEN)
+
+_CONFIGURATION_NAVIGATION = _descriptor.EnumDescriptor(
+  name='Navigation',
+  full_name='aapt.pb.Configuration.Navigation',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_NONAV', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_DPAD', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_TRACKBALL', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_WHEEL', index=4, number=4,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2585,
+  serialized_end=2710,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVIGATION)
+
+
+_CONFIGURATION = _descriptor.Descriptor(
+  name='Configuration',
+  full_name='aapt.pb.Configuration',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='mcc', full_name='aapt.pb.Configuration.mcc', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='mnc', full_name='aapt.pb.Configuration.mnc', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='locale', full_name='aapt.pb.Configuration.locale', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='layout_direction', full_name='aapt.pb.Configuration.layout_direction', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_width', full_name='aapt.pb.Configuration.screen_width', index=4,
+      number=5, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_height', full_name='aapt.pb.Configuration.screen_height', index=5,
+      number=6, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_width_dp', full_name='aapt.pb.Configuration.screen_width_dp', index=6,
+      number=7, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_height_dp', full_name='aapt.pb.Configuration.screen_height_dp', index=7,
+      number=8, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='smallest_screen_width_dp', full_name='aapt.pb.Configuration.smallest_screen_width_dp', index=8,
+      number=9, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_layout_size', full_name='aapt.pb.Configuration.screen_layout_size', index=9,
+      number=10, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_layout_long', full_name='aapt.pb.Configuration.screen_layout_long', index=10,
+      number=11, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_round', full_name='aapt.pb.Configuration.screen_round', index=11,
+      number=12, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='wide_color_gamut', full_name='aapt.pb.Configuration.wide_color_gamut', index=12,
+      number=13, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='hdr', full_name='aapt.pb.Configuration.hdr', index=13,
+      number=14, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='orientation', full_name='aapt.pb.Configuration.orientation', index=14,
+      number=15, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ui_mode_type', full_name='aapt.pb.Configuration.ui_mode_type', index=15,
+      number=16, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ui_mode_night', full_name='aapt.pb.Configuration.ui_mode_night', index=16,
+      number=17, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='density', full_name='aapt.pb.Configuration.density', index=17,
+      number=18, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='touchscreen', full_name='aapt.pb.Configuration.touchscreen', index=18,
+      number=19, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='keys_hidden', full_name='aapt.pb.Configuration.keys_hidden', index=19,
+      number=20, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='keyboard', full_name='aapt.pb.Configuration.keyboard', index=20,
+      number=21, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='nav_hidden', full_name='aapt.pb.Configuration.nav_hidden', index=21,
+      number=22, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='navigation', full_name='aapt.pb.Configuration.navigation', index=22,
+      number=23, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='sdk_version', full_name='aapt.pb.Configuration.sdk_version', index=23,
+      number=24, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='product', full_name='aapt.pb.Configuration.product', index=24,
+      number=25, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _CONFIGURATION_LAYOUTDIRECTION,
+    _CONFIGURATION_SCREENLAYOUTSIZE,
+    _CONFIGURATION_SCREENLAYOUTLONG,
+    _CONFIGURATION_SCREENROUND,
+    _CONFIGURATION_WIDECOLORGAMUT,
+    _CONFIGURATION_HDR,
+    _CONFIGURATION_ORIENTATION,
+    _CONFIGURATION_UIMODETYPE,
+    _CONFIGURATION_UIMODENIGHT,
+    _CONFIGURATION_TOUCHSCREEN,
+    _CONFIGURATION_KEYSHIDDEN,
+    _CONFIGURATION_KEYBOARD,
+    _CONFIGURATION_NAVHIDDEN,
+    _CONFIGURATION_NAVIGATION,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=61,
+  serialized_end=2710,
+)
+
+_CONFIGURATION.fields_by_name['layout_direction'].enum_type = _CONFIGURATION_LAYOUTDIRECTION
+_CONFIGURATION.fields_by_name['screen_layout_size'].enum_type = _CONFIGURATION_SCREENLAYOUTSIZE
+_CONFIGURATION.fields_by_name['screen_layout_long'].enum_type = _CONFIGURATION_SCREENLAYOUTLONG
+_CONFIGURATION.fields_by_name['screen_round'].enum_type = _CONFIGURATION_SCREENROUND
+_CONFIGURATION.fields_by_name['wide_color_gamut'].enum_type = _CONFIGURATION_WIDECOLORGAMUT
+_CONFIGURATION.fields_by_name['hdr'].enum_type = _CONFIGURATION_HDR
+_CONFIGURATION.fields_by_name['orientation'].enum_type = _CONFIGURATION_ORIENTATION
+_CONFIGURATION.fields_by_name['ui_mode_type'].enum_type = _CONFIGURATION_UIMODETYPE
+_CONFIGURATION.fields_by_name['ui_mode_night'].enum_type = _CONFIGURATION_UIMODENIGHT
+_CONFIGURATION.fields_by_name['touchscreen'].enum_type = _CONFIGURATION_TOUCHSCREEN
+_CONFIGURATION.fields_by_name['keys_hidden'].enum_type = _CONFIGURATION_KEYSHIDDEN
+_CONFIGURATION.fields_by_name['keyboard'].enum_type = _CONFIGURATION_KEYBOARD
+_CONFIGURATION.fields_by_name['nav_hidden'].enum_type = _CONFIGURATION_NAVHIDDEN
+_CONFIGURATION.fields_by_name['navigation'].enum_type = _CONFIGURATION_NAVIGATION
+_CONFIGURATION_LAYOUTDIRECTION.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENLAYOUTSIZE.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENLAYOUTLONG.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENROUND.containing_type = _CONFIGURATION
+_CONFIGURATION_WIDECOLORGAMUT.containing_type = _CONFIGURATION
+_CONFIGURATION_HDR.containing_type = _CONFIGURATION
+_CONFIGURATION_ORIENTATION.containing_type = _CONFIGURATION
+_CONFIGURATION_UIMODETYPE.containing_type = _CONFIGURATION
+_CONFIGURATION_UIMODENIGHT.containing_type = _CONFIGURATION
+_CONFIGURATION_TOUCHSCREEN.containing_type = _CONFIGURATION
+_CONFIGURATION_KEYSHIDDEN.containing_type = _CONFIGURATION
+_CONFIGURATION_KEYBOARD.containing_type = _CONFIGURATION
+_CONFIGURATION_NAVHIDDEN.containing_type = _CONFIGURATION
+_CONFIGURATION_NAVIGATION.containing_type = _CONFIGURATION
+DESCRIPTOR.message_types_by_name['Configuration'] = _CONFIGURATION
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Configuration = _reflection.GeneratedProtocolMessageType('Configuration', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGURATION,
+  '__module__' : 'frameworks.base.tools.aapt2.Configuration_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Configuration)
+  })
+_sym_db.RegisterMessage(Configuration)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/src/build/android/gyp/proto/README.md b/src/build/android/gyp/proto/README.md
new file mode 100644
index 0000000..6850410
--- /dev/null
+++ b/src/build/android/gyp/proto/README.md
@@ -0,0 +1,32 @@
+# Protos
+These protos are generated from Resources.proto and Configuration.proto in the
+frameworks/base/tools/aapt2/ directory of the Android repo. To regenerate them
+after upstream changes, run this command from the root of an Android checkout:
+
+    protoc --python_out=some_dir frameworks/base/tools/aapt2/Resources.proto \
+        frameworks/base/tools/aapt2/Configuration.proto
+
+Then copy the resulting \*pb2.py files from some_dir into this directory. To
+make sure Resources_pb2.py can import Configuration_pb2.py, replace the
+"from frameworks.base.tools.aapt2" portion of its import statement with
+"from ." so the module is imported from the current directory, as shown below.
diff --git a/src/build/android/gyp/proto/Resources_pb2.py b/src/build/android/gyp/proto/Resources_pb2.py
new file mode 100644
index 0000000..3bbd702
--- /dev/null
+++ b/src/build/android/gyp/proto/Resources_pb2.py
@@ -0,0 +1,2779 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: frameworks/base/tools/aapt2/Resources.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import Configuration_pb2 as frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='frameworks/base/tools/aapt2/Resources.proto',
+  package='aapt.pb',
+  syntax='proto3',
+  serialized_options=_b('\n\020com.android.aapt'),
+  serialized_pb=_b('\n+frameworks/base/tools/aapt2/Resources.proto\x12\x07\x61\x61pt.pb\x1a/frameworks/base/tools/aapt2/Configuration.proto\"\x1a\n\nStringPool\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"<\n\x0eSourcePosition\x12\x13\n\x0bline_number\x18\x01 \x01(\r\x12\x15\n\rcolumn_number\x18\x02 \x01(\r\"E\n\x06Source\x12\x10\n\x08path_idx\x18\x01 \x01(\r\x12)\n\x08position\x18\x02 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"0\n\x0fToolFingerprint\x12\x0c\n\x04tool\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"\xbb\x01\n\rResourceTable\x12(\n\x0bsource_pool\x18\x01 \x01(\x0b\x32\x13.aapt.pb.StringPool\x12!\n\x07package\x18\x02 \x03(\x0b\x32\x10.aapt.pb.Package\x12)\n\x0boverlayable\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Overlayable\x12\x32\n\x10tool_fingerprint\x18\x04 \x03(\x0b\x32\x18.aapt.pb.ToolFingerprint\"\x17\n\tPackageId\x12\n\n\x02id\x18\x01 \x01(\r\"d\n\x07Package\x12&\n\npackage_id\x18\x01 \x01(\x0b\x32\x12.aapt.pb.PackageId\x12\x14\n\x0cpackage_name\x18\x02 \x01(\t\x12\x1b\n\x04type\x18\x03 \x03(\x0b\x32\r.aapt.pb.Type\"\x14\n\x06TypeId\x12\n\n\x02id\x18\x01 \x01(\r\"U\n\x04Type\x12 \n\x07type_id\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.TypeId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1d\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x0e.aapt.pb.Entry\"\x97\x01\n\nVisibility\x12(\n\x05level\x18\x01 \x01(\x0e\x32\x19.aapt.pb.Visibility.Level\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x03 \x01(\t\"-\n\x05Level\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PRIVATE\x10\x01\x12\n\n\x06PUBLIC\x10\x02\"<\n\x08\x41llowNew\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\"K\n\x0bOverlayable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\r\n\x05\x61\x63tor\x18\x03 \x01(\t\"\xf3\x01\n\x0fOverlayableItem\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12/\n\x06policy\x18\x03 \x03(\x0e\x32\x1f.aapt.pb.OverlayableItem.Policy\x12\x17\n\x0foverlayable_idx\x18\x04 \x01(\r\"d\n\x06Policy\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06PUBLIC\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\n\n\x06VENDOR\x10\x03\x12\x0b\n\x07PRODUCT\x10\x04\x12\r\n\tSIGNATURE\x10\x05\x12\x07\n\x03ODM\x10\x06\x12\x07\n\x03OEM\x10\x07\"\x15\n\x07\x45ntryId\x12\n\n\x02id\x18\x01 \x01(\r\"\xe8\x01\n\x05\x45ntry\x12\"\n\x08\x65ntry_id\x18\x01 \x01(\x0b\x32\x10.aapt.pb.EntryId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\nvisibility\x18\x03 \x01(\x0b\x32\x13.aapt.pb.Visibility\x12$\n\tallow_new\x18\x04 \x01(\x0b\x32\x11.aapt.pb.AllowNew\x12\x32\n\x10overlayable_item\x18\x05 \x01(\x0b\x32\x18.aapt.pb.OverlayableItem\x12*\n\x0c\x63onfig_value\x18\x06 \x03(\x0b\x32\x14.aapt.pb.ConfigValue\"T\n\x0b\x43onfigValue\x12&\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x16.aapt.pb.Configuration\x12\x1d\n\x05value\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.Value\"\xa1\x01\n\x05Value\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x0c\n\x04weak\x18\x03 \x01(\x08\x12\x1d\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.ItemH\x00\x12\x30\n\x0e\x63ompound_value\x18\x05 \x01(\x0b\x32\x16.aapt.pb.CompoundValueH\x00\x42\x07\n\x05value\"\x8d\x02\n\x04Item\x12!\n\x03ref\x18\x01 \x01(\x0b\x32\x12.aapt.pb.ReferenceH\x00\x12\x1e\n\x03str\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.StringH\x00\x12%\n\x07raw_str\x18\x03 \x01(\x0b\x32\x12.aapt.pb.RawStringH\x00\x12+\n\nstyled_str\x18\x04 \x01(\x0b\x32\x15.aapt.pb.StyledStringH\x00\x12&\n\x04\x66ile\x18\x05 \x01(\x0b\x32\x16.aapt.pb.FileReferenceH\x00\x12\x19\n\x02id\x18\x06 \x01(\x0b\x32\x0b.aapt.pb.IdH\x00\x12\"\n\x04prim\x18\x07 \x01(\x0b\x32\x12.aapt.pb.PrimitiveH\x00\x42\x07\n\x05value\"\xca\x01\n\rCompoundValue\x12\"\n\x04\x61ttr\x18\x01 \x01(\x0b\x32\x12.aapt.pb.AttributeH\x00\x12\x1f\n\x05style\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.StyleH\x00\x12\'\n\tstyleable\x18\x03 \x01(\x0b\x32\x12.aapt.pb.StyleableH\x00\x12\x1f\n\x05\x61rray\x18\x04 \x01(\x0b\x32\x0e.aapt.pb.ArrayH\x00\x12!\n\x06plural\x18\x05 \x01(\x0b\x32\x0f.aapt.pb.PluralH\x00\x42\x07\n\x05value\"\x18\n\x07\x42oolean\x12\r\n\x05value\x18\x01 \x01(\x08\"\xa9\x01\n\tReference\x12%\n\x04type\x18\x01 \x01(\x0e\x32\x17.aapt.pb.Reference.Type\x12\n\n\x02id\x18\x02 \x01(\r\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07private\x18\x04 \x01(\x08\x12$\n\nis_dynamic\x18\x05 \x01(\x0b\x32\x10.aapt.pb.Boolean\"$\n\x04Type\x12\r\n\tREFERENCE\x10\x00\x12\r\n\tATTRIBUTE\x10\x01\"\x04\n\x02Id\"\x17\n\x06String\x12\r\n\x05value\x18\x01 \x01(\t\"\x1a\n\tRawString\x12\r\n\x05value\x18\x01 \x01(\t\"\x83\x01\n\x0cStyledString\x12\r\n\x05value\x18\x01 \x01(\t\x12(\n\x04span\x18\x02 \x03(\x0b\x32\x1a.aapt.pb.StyledString.Span\x1a:\n\x04Span\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12\x12\n\nfirst_char\x18\x02 \x01(\r\x12\x11\n\tlast_char\x18\x03 \x01(\r\"\x85\x01\n\rFileReference\x12\x0c\n\x04path\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.aapt.pb.FileReference.Type\";\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03PNG\x10\x01\x12\x0e\n\nBINARY_XML\x10\x02\x12\r\n\tPROTO_XML\x10\x03\"\x83\x04\n\tPrimitive\x12\x31\n\nnull_value\x18\x01 \x01(\x0b\x32\x1b.aapt.pb.Primitive.NullTypeH\x00\x12\x33\n\x0b\x65mpty_value\x18\x02 \x01(\x0b\x32\x1c.aapt.pb.Primitive.EmptyTypeH\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x19\n\x0f\x64imension_value\x18\r \x01(\rH\x00\x12\x18\n\x0e\x66raction_value\x18\x0e \x01(\rH\x00\x12\x1b\n\x11int_decimal_value\x18\x06 \x01(\x05H\x00\x12\x1f\n\x15int_hexadecimal_value\x18\x07 \x01(\rH\x00\x12\x17\n\rboolean_value\x18\x08 \x01(\x08H\x00\x12\x1b\n\x11\x63olor_argb8_value\x18\t \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb8_value\x18\n \x01(\rH\x00\x12\x1b\n\x11\x63olor_argb4_value\x18\x0b \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb4_value\x18\x0c \x01(\rH\x00\x12(\n\x1a\x64imension_value_deprecated\x18\x04 \x01(\x02\x42\x02\x18\x01H\x00\x12\'\n\x19\x66raction_value_deprecated\x18\x05 \x01(\x02\x42\x02\x18\x01H\x00\x1a\n\n\x08NullType\x1a\x0b\n\tEmptyTypeB\r\n\x0boneof_value\"\x90\x03\n\tAttribute\x12\x14\n\x0c\x66ormat_flags\x18\x01 \x01(\r\x12\x0f\n\x07min_int\x18\x02 \x01(\x05\x12\x0f\n\x07max_int\x18\x03 \x01(\x05\x12)\n\x06symbol\x18\x04 \x03(\x0b\x32\x19.aapt.pb.Attribute.Symbol\x1ay\n\x06Symbol\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04name\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\r\n\x05value\x18\x04 \x01(\r\x12\x0c\n\x04type\x18\x05 \x01(\r\"\xa4\x01\n\x0b\x46ormatFlags\x12\x08\n\x04NONE\x10\x00\x12\t\n\x03\x41NY\x10\xff\xff\x03\x12\r\n\tREFERENCE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x0b\n\x07INTEGER\x10\x04\x12\x0b\n\x07\x42OOLEAN\x10\x08\x12\t\n\x05\x43OLOR\x10\x10\x12\t\n\x05\x46LOAT\x10 \x12\r\n\tDIMENSION\x10@\x12\r\n\x08\x46RACTION\x10\x80\x01\x12\n\n\x04\x45NUM\x10\x80\x80\x04\x12\x0b\n\x05\x46LAGS\x10\x80\x80\x08\"\xf1\x01\n\x05Style\x12\"\n\x06parent\x18\x01 \x01(\x0b\x32\x12.aapt.pb.Reference\x12&\n\rparent_source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12#\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Style.Entry\x1aw\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1f\n\x03key\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"\x91\x01\n\tStyleable\x12\'\n\x05\x65ntry\x18\x01 \x03(\x0b\x32\x18.aapt.pb.Styleable.Entry\x1a[\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04\x61ttr\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\"\x8a\x01\n\x05\x41rray\x12\'\n\x07\x65lement\x18\x01 \x03(\x0b\x32\x16.aapt.pb.Array.Element\x1aX\n\x07\x45lement\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1b\n\x04item\x18\x03 \x01(\x0b\x32\r.aapt.pb.Item\"\xef\x01\n\x06Plural\x12$\n\x05\x65ntry\x18\x01 \x03(\x0b\x32\x15.aapt.pb.Plural.Entry\x1a|\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12$\n\x05\x61rity\x18\x03 \x01(\x0e\x32\x15.aapt.pb.Plural.Arity\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"A\n\x05\x41rity\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\x12\x07\n\x03TWO\x10\x02\x12\x07\n\x03\x46\x45W\x10\x03\x12\x08\n\x04MANY\x10\x04\x12\t\n\x05OTHER\x10\x05\"r\n\x07XmlNode\x12&\n\x07\x65lement\x18\x01 \x01(\x0b\x32\x13.aapt.pb.XmlElementH\x00\x12\x0e\n\x04text\x18\x02 \x01(\tH\x00\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePositionB\x06\n\x04node\"\xb2\x01\n\nXmlElement\x12\x34\n\x15namespace_declaration\x18\x01 \x03(\x0b\x32\x15.aapt.pb.XmlNamespace\x12\x15\n\rnamespace_uri\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12(\n\tattribute\x18\x04 \x03(\x0b\x32\x15.aapt.pb.XmlAttribute\x12\x1f\n\x05\x63hild\x18\x05 \x03(\x0b\x32\x10.aapt.pb.XmlNode\"T\n\x0cXmlNamespace\x12\x0e\n\x06prefix\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"\xa6\x01\n\x0cXmlAttribute\x12\x15\n\rnamespace_uri\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\'\n\x06source\x18\x04 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\x12\x13\n\x0bresource_id\x18\x05 \x01(\r\x12$\n\rcompiled_item\x18\x06 \x01(\x0b\x32\r.aapt.pb.ItemB\x12\n\x10\x63om.android.aaptb\x06proto3')
+  ,
+  dependencies=[frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2.DESCRIPTOR,])
+
+
+
+_VISIBILITY_LEVEL = _descriptor.EnumDescriptor(
+  name='Level',
+  full_name='aapt.pb.Visibility.Level',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIVATE', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PUBLIC', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=849,
+  serialized_end=894,
+)
+_sym_db.RegisterEnumDescriptor(_VISIBILITY_LEVEL)
+
+_OVERLAYABLEITEM_POLICY = _descriptor.EnumDescriptor(
+  name='Policy',
+  full_name='aapt.pb.OverlayableItem.Policy',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NONE', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PUBLIC', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SYSTEM', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='VENDOR', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRODUCT', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SIGNATURE', index=5, number=5,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ODM', index=6, number=6,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OEM', index=7, number=7,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1179,
+  serialized_end=1279,
+)
+_sym_db.RegisterEnumDescriptor(_OVERLAYABLEITEM_POLICY)
+
+_REFERENCE_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='aapt.pb.Reference.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='REFERENCE', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ATTRIBUTE', index=1, number=1,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2426,
+  serialized_end=2462,
+)
+_sym_db.RegisterEnumDescriptor(_REFERENCE_TYPE)
+
+_FILEREFERENCE_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='aapt.pb.FileReference.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PNG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BINARY_XML', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PROTO_XML', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2732,
+  serialized_end=2791,
+)
+_sym_db.RegisterEnumDescriptor(_FILEREFERENCE_TYPE)
+
+_ATTRIBUTE_FORMATFLAGS = _descriptor.EnumDescriptor(
+  name='FormatFlags',
+  full_name='aapt.pb.Attribute.FormatFlags',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NONE', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ANY', index=1, number=65535,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REFERENCE', index=2, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=3, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INTEGER', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BOOLEAN', index=5, number=8,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COLOR', index=6, number=16,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FLOAT', index=7, number=32,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DIMENSION', index=8, number=64,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FRACTION', index=9, number=128,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ENUM', index=10, number=65536,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FLAGS', index=11, number=131072,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3548,
+  serialized_end=3712,
+)
+_sym_db.RegisterEnumDescriptor(_ATTRIBUTE_FORMATFLAGS)
+
+_PLURAL_ARITY = _descriptor.EnumDescriptor(
+  name='Arity',
+  full_name='aapt.pb.Plural.Arity',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='ZERO', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ONE', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TWO', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FEW', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='MANY', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTHER', index=5, number=5,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=4422,
+  serialized_end=4487,
+)
+_sym_db.RegisterEnumDescriptor(_PLURAL_ARITY)
+
+
+_STRINGPOOL = _descriptor.Descriptor(
+  name='StringPool',
+  full_name='aapt.pb.StringPool',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='data', full_name='aapt.pb.StringPool.data', index=0,
+      number=1, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b(""),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=105,
+  serialized_end=131,
+)
+
+
+_SOURCEPOSITION = _descriptor.Descriptor(
+  name='SourcePosition',
+  full_name='aapt.pb.SourcePosition',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='line_number', full_name='aapt.pb.SourcePosition.line_number', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='column_number', full_name='aapt.pb.SourcePosition.column_number', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=133,
+  serialized_end=193,
+)
+
+
+_SOURCE = _descriptor.Descriptor(
+  name='Source',
+  full_name='aapt.pb.Source',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='path_idx', full_name='aapt.pb.Source.path_idx', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='position', full_name='aapt.pb.Source.position', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=195,
+  serialized_end=264,
+)
+
+
+_TOOLFINGERPRINT = _descriptor.Descriptor(
+  name='ToolFingerprint',
+  full_name='aapt.pb.ToolFingerprint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='tool', full_name='aapt.pb.ToolFingerprint.tool', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='version', full_name='aapt.pb.ToolFingerprint.version', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=266,
+  serialized_end=314,
+)
+
+
+_RESOURCETABLE = _descriptor.Descriptor(
+  name='ResourceTable',
+  full_name='aapt.pb.ResourceTable',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source_pool', full_name='aapt.pb.ResourceTable.source_pool', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='package', full_name='aapt.pb.ResourceTable.package', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='overlayable', full_name='aapt.pb.ResourceTable.overlayable', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='tool_fingerprint', full_name='aapt.pb.ResourceTable.tool_fingerprint', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=317,
+  serialized_end=504,
+)
+
+
+_PACKAGEID = _descriptor.Descriptor(
+  name='PackageId',
+  full_name='aapt.pb.PackageId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.PackageId.id', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=506,
+  serialized_end=529,
+)
+
+
+_PACKAGE = _descriptor.Descriptor(
+  name='Package',
+  full_name='aapt.pb.Package',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='package_id', full_name='aapt.pb.Package.package_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='package_name', full_name='aapt.pb.Package.package_name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.Package.type', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=531,
+  serialized_end=631,
+)
+
+
+_TYPEID = _descriptor.Descriptor(
+  name='TypeId',
+  full_name='aapt.pb.TypeId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.TypeId.id', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=633,
+  serialized_end=653,
+)
+
+
+_TYPE = _descriptor.Descriptor(
+  name='Type',
+  full_name='aapt.pb.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type_id', full_name='aapt.pb.Type.type_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Type.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Type.entry', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=655,
+  serialized_end=740,
+)
+
+
+_VISIBILITY = _descriptor.Descriptor(
+  name='Visibility',
+  full_name='aapt.pb.Visibility',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='level', full_name='aapt.pb.Visibility.level', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Visibility.source', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Visibility.comment', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _VISIBILITY_LEVEL,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=743,
+  serialized_end=894,
+)
+
+
+_ALLOWNEW = _descriptor.Descriptor(
+  name='AllowNew',
+  full_name='aapt.pb.AllowNew',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.AllowNew.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.AllowNew.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=896,
+  serialized_end=956,
+)
+
+
+_OVERLAYABLE = _descriptor.Descriptor(
+  name='Overlayable',
+  full_name='aapt.pb.Overlayable',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Overlayable.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Overlayable.source', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='actor', full_name='aapt.pb.Overlayable.actor', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=958,
+  serialized_end=1033,
+)
+
+
+_OVERLAYABLEITEM = _descriptor.Descriptor(
+  name='OverlayableItem',
+  full_name='aapt.pb.OverlayableItem',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.OverlayableItem.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.OverlayableItem.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='policy', full_name='aapt.pb.OverlayableItem.policy', index=2,
+      number=3, type=14, cpp_type=8, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='overlayable_idx', full_name='aapt.pb.OverlayableItem.overlayable_idx', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _OVERLAYABLEITEM_POLICY,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1036,
+  serialized_end=1279,
+)
+
+
+_ENTRYID = _descriptor.Descriptor(
+  name='EntryId',
+  full_name='aapt.pb.EntryId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.EntryId.id', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1281,
+  serialized_end=1302,
+)
+
+
+_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entry_id', full_name='aapt.pb.Entry.entry_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Entry.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='visibility', full_name='aapt.pb.Entry.visibility', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='allow_new', full_name='aapt.pb.Entry.allow_new', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='overlayable_item', full_name='aapt.pb.Entry.overlayable_item', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='config_value', full_name='aapt.pb.Entry.config_value', index=5,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1305,
+  serialized_end=1537,
+)
+
+
+_CONFIGVALUE = _descriptor.Descriptor(
+  name='ConfigValue',
+  full_name='aapt.pb.ConfigValue',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='config', full_name='aapt.pb.ConfigValue.config', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.ConfigValue.value', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1539,
+  serialized_end=1623,
+)
+
+
+_VALUE = _descriptor.Descriptor(
+  name='Value',
+  full_name='aapt.pb.Value',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Value.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Value.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='weak', full_name='aapt.pb.Value.weak', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Value.item', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='compound_value', full_name='aapt.pb.Value.compound_value', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='aapt.pb.Value.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1626,
+  serialized_end=1787,
+)
+
+
+_ITEM = _descriptor.Descriptor(
+  name='Item',
+  full_name='aapt.pb.Item',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ref', full_name='aapt.pb.Item.ref', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='str', full_name='aapt.pb.Item.str', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='raw_str', full_name='aapt.pb.Item.raw_str', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='styled_str', full_name='aapt.pb.Item.styled_str', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='file', full_name='aapt.pb.Item.file', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.Item.id', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='prim', full_name='aapt.pb.Item.prim', index=6,
+      number=7, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='aapt.pb.Item.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1790,
+  serialized_end=2059,
+)
+
+
+_COMPOUNDVALUE = _descriptor.Descriptor(
+  name='CompoundValue',
+  full_name='aapt.pb.CompoundValue',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='attr', full_name='aapt.pb.CompoundValue.attr', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='style', full_name='aapt.pb.CompoundValue.style', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='styleable', full_name='aapt.pb.CompoundValue.styleable', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='array', full_name='aapt.pb.CompoundValue.array', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='plural', full_name='aapt.pb.CompoundValue.plural', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='aapt.pb.CompoundValue.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=2062,
+  serialized_end=2264,
+)
+
+
+_BOOLEAN = _descriptor.Descriptor(
+  name='Boolean',
+  full_name='aapt.pb.Boolean',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.Boolean.value', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2266,
+  serialized_end=2290,
+)
+
+
+_REFERENCE = _descriptor.Descriptor(
+  name='Reference',
+  full_name='aapt.pb.Reference',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.Reference.type', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.Reference.id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Reference.name', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='private', full_name='aapt.pb.Reference.private', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='is_dynamic', full_name='aapt.pb.Reference.is_dynamic', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _REFERENCE_TYPE,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2293,
+  serialized_end=2462,
+)
+
+
+_ID = _descriptor.Descriptor(
+  name='Id',
+  full_name='aapt.pb.Id',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2464,
+  serialized_end=2468,
+)
+
+
+_STRING = _descriptor.Descriptor(
+  name='String',
+  full_name='aapt.pb.String',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.String.value', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2470,
+  serialized_end=2493,
+)
+
+
+_RAWSTRING = _descriptor.Descriptor(
+  name='RawString',
+  full_name='aapt.pb.RawString',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.RawString.value', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2495,
+  serialized_end=2521,
+)
+
+
+_STYLEDSTRING_SPAN = _descriptor.Descriptor(
+  name='Span',
+  full_name='aapt.pb.StyledString.Span',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='tag', full_name='aapt.pb.StyledString.Span.tag', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='first_char', full_name='aapt.pb.StyledString.Span.first_char', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='last_char', full_name='aapt.pb.StyledString.Span.last_char', index=2,
+      number=3, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2597,
+  serialized_end=2655,
+)
+
+_STYLEDSTRING = _descriptor.Descriptor(
+  name='StyledString',
+  full_name='aapt.pb.StyledString',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.StyledString.value', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='span', full_name='aapt.pb.StyledString.span', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_STYLEDSTRING_SPAN, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2524,
+  serialized_end=2655,
+)
+
+
+_FILEREFERENCE = _descriptor.Descriptor(
+  name='FileReference',
+  full_name='aapt.pb.FileReference',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='path', full_name='aapt.pb.FileReference.path', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.FileReference.type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FILEREFERENCE_TYPE,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2658,
+  serialized_end=2791,
+)
+
+
+_PRIMITIVE_NULLTYPE = _descriptor.Descriptor(
+  name='NullType',
+  full_name='aapt.pb.Primitive.NullType',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3271,
+  serialized_end=3281,
+)
+
+_PRIMITIVE_EMPTYTYPE = _descriptor.Descriptor(
+  name='EmptyType',
+  full_name='aapt.pb.Primitive.EmptyType',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3283,
+  serialized_end=3294,
+)
+
+_PRIMITIVE = _descriptor.Descriptor(
+  name='Primitive',
+  full_name='aapt.pb.Primitive',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='null_value', full_name='aapt.pb.Primitive.null_value', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='empty_value', full_name='aapt.pb.Primitive.empty_value', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='float_value', full_name='aapt.pb.Primitive.float_value', index=2,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='dimension_value', full_name='aapt.pb.Primitive.dimension_value', index=3,
+      number=13, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='fraction_value', full_name='aapt.pb.Primitive.fraction_value', index=4,
+      number=14, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='int_decimal_value', full_name='aapt.pb.Primitive.int_decimal_value', index=5,
+      number=6, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='int_hexadecimal_value', full_name='aapt.pb.Primitive.int_hexadecimal_value', index=6,
+      number=7, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='boolean_value', full_name='aapt.pb.Primitive.boolean_value', index=7,
+      number=8, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_argb8_value', full_name='aapt.pb.Primitive.color_argb8_value', index=8,
+      number=9, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_rgb8_value', full_name='aapt.pb.Primitive.color_rgb8_value', index=9,
+      number=10, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_argb4_value', full_name='aapt.pb.Primitive.color_argb4_value', index=10,
+      number=11, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_rgb4_value', full_name='aapt.pb.Primitive.color_rgb4_value', index=11,
+      number=12, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='dimension_value_deprecated', full_name='aapt.pb.Primitive.dimension_value_deprecated', index=12,
+      number=4, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=_b('\030\001'), file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='fraction_value_deprecated', full_name='aapt.pb.Primitive.fraction_value_deprecated', index=13,
+      number=5, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=_b('\030\001'), file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_PRIMITIVE_NULLTYPE, _PRIMITIVE_EMPTYTYPE, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='oneof_value', full_name='aapt.pb.Primitive.oneof_value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=2794,
+  serialized_end=3309,
+)
+
+
+_ATTRIBUTE_SYMBOL = _descriptor.Descriptor(
+  name='Symbol',
+  full_name='aapt.pb.Attribute.Symbol',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Attribute.Symbol.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Attribute.Symbol.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Attribute.Symbol.name', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.Attribute.Symbol.value', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.Attribute.Symbol.type', index=4,
+      number=5, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3424,
+  serialized_end=3545,
+)
+
+_ATTRIBUTE = _descriptor.Descriptor(
+  name='Attribute',
+  full_name='aapt.pb.Attribute',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='format_flags', full_name='aapt.pb.Attribute.format_flags', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='min_int', full_name='aapt.pb.Attribute.min_int', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='max_int', full_name='aapt.pb.Attribute.max_int', index=2,
+      number=3, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='symbol', full_name='aapt.pb.Attribute.symbol', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_ATTRIBUTE_SYMBOL, ],
+  enum_types=[
+    _ATTRIBUTE_FORMATFLAGS,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3312,
+  serialized_end=3712,
+)
+
+
+_STYLE_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Style.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Style.Entry.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Style.Entry.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='key', full_name='aapt.pb.Style.Entry.key', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Style.Entry.item', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3837,
+  serialized_end=3956,
+)
+
+_STYLE = _descriptor.Descriptor(
+  name='Style',
+  full_name='aapt.pb.Style',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='parent', full_name='aapt.pb.Style.parent', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='parent_source', full_name='aapt.pb.Style.parent_source', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Style.entry', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_STYLE_ENTRY, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3715,
+  serialized_end=3956,
+)
+
+
+_STYLEABLE_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Styleable.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Styleable.Entry.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Styleable.Entry.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='attr', full_name='aapt.pb.Styleable.Entry.attr', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4013,
+  serialized_end=4104,
+)
+
+_STYLEABLE = _descriptor.Descriptor(
+  name='Styleable',
+  full_name='aapt.pb.Styleable',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Styleable.entry', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_STYLEABLE_ENTRY, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3959,
+  serialized_end=4104,
+)
+
+
+_ARRAY_ELEMENT = _descriptor.Descriptor(
+  name='Element',
+  full_name='aapt.pb.Array.Element',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Array.Element.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Array.Element.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Array.Element.item', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4157,
+  serialized_end=4245,
+)
+
+_ARRAY = _descriptor.Descriptor(
+  name='Array',
+  full_name='aapt.pb.Array',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='element', full_name='aapt.pb.Array.element', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_ARRAY_ELEMENT, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4107,
+  serialized_end=4245,
+)
+
+
+_PLURAL_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Plural.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Plural.Entry.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Plural.Entry.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='arity', full_name='aapt.pb.Plural.Entry.arity', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Plural.Entry.item', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4296,
+  serialized_end=4420,
+)
+
+_PLURAL = _descriptor.Descriptor(
+  name='Plural',
+  full_name='aapt.pb.Plural',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Plural.entry', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_PLURAL_ENTRY, ],
+  enum_types=[
+    _PLURAL_ARITY,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4248,
+  serialized_end=4487,
+)
+
+
+_XMLNODE = _descriptor.Descriptor(
+  name='XmlNode',
+  full_name='aapt.pb.XmlNode',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='element', full_name='aapt.pb.XmlNode.element', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='text', full_name='aapt.pb.XmlNode.text', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.XmlNode.source', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='node', full_name='aapt.pb.XmlNode.node',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=4489,
+  serialized_end=4603,
+)
+
+
+_XMLELEMENT = _descriptor.Descriptor(
+  name='XmlElement',
+  full_name='aapt.pb.XmlElement',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='namespace_declaration', full_name='aapt.pb.XmlElement.namespace_declaration', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='namespace_uri', full_name='aapt.pb.XmlElement.namespace_uri', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.XmlElement.name', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='attribute', full_name='aapt.pb.XmlElement.attribute', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='child', full_name='aapt.pb.XmlElement.child', index=4,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4606,
+  serialized_end=4784,
+)
+
+
+_XMLNAMESPACE = _descriptor.Descriptor(
+  name='XmlNamespace',
+  full_name='aapt.pb.XmlNamespace',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='prefix', full_name='aapt.pb.XmlNamespace.prefix', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='uri', full_name='aapt.pb.XmlNamespace.uri', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.XmlNamespace.source', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4786,
+  serialized_end=4870,
+)
+
+
+_XMLATTRIBUTE = _descriptor.Descriptor(
+  name='XmlAttribute',
+  full_name='aapt.pb.XmlAttribute',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='namespace_uri', full_name='aapt.pb.XmlAttribute.namespace_uri', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.XmlAttribute.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.XmlAttribute.value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.XmlAttribute.source', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='resource_id', full_name='aapt.pb.XmlAttribute.resource_id', index=4,
+      number=5, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='compiled_item', full_name='aapt.pb.XmlAttribute.compiled_item', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4873,
+  serialized_end=5039,
+)
+
+_SOURCE.fields_by_name['position'].message_type = _SOURCEPOSITION
+_RESOURCETABLE.fields_by_name['source_pool'].message_type = _STRINGPOOL
+_RESOURCETABLE.fields_by_name['package'].message_type = _PACKAGE
+_RESOURCETABLE.fields_by_name['overlayable'].message_type = _OVERLAYABLE
+_RESOURCETABLE.fields_by_name['tool_fingerprint'].message_type = _TOOLFINGERPRINT
+_PACKAGE.fields_by_name['package_id'].message_type = _PACKAGEID
+_PACKAGE.fields_by_name['type'].message_type = _TYPE
+_TYPE.fields_by_name['type_id'].message_type = _TYPEID
+_TYPE.fields_by_name['entry'].message_type = _ENTRY
+_VISIBILITY.fields_by_name['level'].enum_type = _VISIBILITY_LEVEL
+_VISIBILITY.fields_by_name['source'].message_type = _SOURCE
+_VISIBILITY_LEVEL.containing_type = _VISIBILITY
+_ALLOWNEW.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLE.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLEITEM.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLEITEM.fields_by_name['policy'].enum_type = _OVERLAYABLEITEM_POLICY
+_OVERLAYABLEITEM_POLICY.containing_type = _OVERLAYABLEITEM
+_ENTRY.fields_by_name['entry_id'].message_type = _ENTRYID
+_ENTRY.fields_by_name['visibility'].message_type = _VISIBILITY
+_ENTRY.fields_by_name['allow_new'].message_type = _ALLOWNEW
+_ENTRY.fields_by_name['overlayable_item'].message_type = _OVERLAYABLEITEM
+_ENTRY.fields_by_name['config_value'].message_type = _CONFIGVALUE
+_CONFIGVALUE.fields_by_name['config'].message_type = frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2._CONFIGURATION
+_CONFIGVALUE.fields_by_name['value'].message_type = _VALUE
+_VALUE.fields_by_name['source'].message_type = _SOURCE
+_VALUE.fields_by_name['item'].message_type = _ITEM
+_VALUE.fields_by_name['compound_value'].message_type = _COMPOUNDVALUE
+_VALUE.oneofs_by_name['value'].fields.append(
+  _VALUE.fields_by_name['item'])
+_VALUE.fields_by_name['item'].containing_oneof = _VALUE.oneofs_by_name['value']
+_VALUE.oneofs_by_name['value'].fields.append(
+  _VALUE.fields_by_name['compound_value'])
+_VALUE.fields_by_name['compound_value'].containing_oneof = _VALUE.oneofs_by_name['value']
+_ITEM.fields_by_name['ref'].message_type = _REFERENCE
+_ITEM.fields_by_name['str'].message_type = _STRING
+_ITEM.fields_by_name['raw_str'].message_type = _RAWSTRING
+_ITEM.fields_by_name['styled_str'].message_type = _STYLEDSTRING
+_ITEM.fields_by_name['file'].message_type = _FILEREFERENCE
+_ITEM.fields_by_name['id'].message_type = _ID
+_ITEM.fields_by_name['prim'].message_type = _PRIMITIVE
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['ref'])
+_ITEM.fields_by_name['ref'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['str'])
+_ITEM.fields_by_name['str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['raw_str'])
+_ITEM.fields_by_name['raw_str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['styled_str'])
+_ITEM.fields_by_name['styled_str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['file'])
+_ITEM.fields_by_name['file'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['id'])
+_ITEM.fields_by_name['id'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['prim'])
+_ITEM.fields_by_name['prim'].containing_oneof = _ITEM.oneofs_by_name['value']
+_COMPOUNDVALUE.fields_by_name['attr'].message_type = _ATTRIBUTE
+_COMPOUNDVALUE.fields_by_name['style'].message_type = _STYLE
+_COMPOUNDVALUE.fields_by_name['styleable'].message_type = _STYLEABLE
+_COMPOUNDVALUE.fields_by_name['array'].message_type = _ARRAY
+_COMPOUNDVALUE.fields_by_name['plural'].message_type = _PLURAL
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['attr'])
+_COMPOUNDVALUE.fields_by_name['attr'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['style'])
+_COMPOUNDVALUE.fields_by_name['style'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['styleable'])
+_COMPOUNDVALUE.fields_by_name['styleable'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['array'])
+_COMPOUNDVALUE.fields_by_name['array'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['plural'])
+_COMPOUNDVALUE.fields_by_name['plural'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_REFERENCE.fields_by_name['type'].enum_type = _REFERENCE_TYPE
+_REFERENCE.fields_by_name['is_dynamic'].message_type = _BOOLEAN
+_REFERENCE_TYPE.containing_type = _REFERENCE
+_STYLEDSTRING_SPAN.containing_type = _STYLEDSTRING
+_STYLEDSTRING.fields_by_name['span'].message_type = _STYLEDSTRING_SPAN
+_FILEREFERENCE.fields_by_name['type'].enum_type = _FILEREFERENCE_TYPE
+_FILEREFERENCE_TYPE.containing_type = _FILEREFERENCE
+_PRIMITIVE_NULLTYPE.containing_type = _PRIMITIVE
+_PRIMITIVE_EMPTYTYPE.containing_type = _PRIMITIVE
+_PRIMITIVE.fields_by_name['null_value'].message_type = _PRIMITIVE_NULLTYPE
+_PRIMITIVE.fields_by_name['empty_value'].message_type = _PRIMITIVE_EMPTYTYPE
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['null_value'])
+_PRIMITIVE.fields_by_name['null_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['empty_value'])
+_PRIMITIVE.fields_by_name['empty_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['float_value'])
+_PRIMITIVE.fields_by_name['float_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['dimension_value'])
+_PRIMITIVE.fields_by_name['dimension_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['fraction_value'])
+_PRIMITIVE.fields_by_name['fraction_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['int_decimal_value'])
+_PRIMITIVE.fields_by_name['int_decimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['int_hexadecimal_value'])
+_PRIMITIVE.fields_by_name['int_hexadecimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['boolean_value'])
+_PRIMITIVE.fields_by_name['boolean_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_argb8_value'])
+_PRIMITIVE.fields_by_name['color_argb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_rgb8_value'])
+_PRIMITIVE.fields_by_name['color_rgb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_argb4_value'])
+_PRIMITIVE.fields_by_name['color_argb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_rgb4_value'])
+_PRIMITIVE.fields_by_name['color_rgb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['dimension_value_deprecated'])
+_PRIMITIVE.fields_by_name['dimension_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['fraction_value_deprecated'])
+_PRIMITIVE.fields_by_name['fraction_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_ATTRIBUTE_SYMBOL.fields_by_name['source'].message_type = _SOURCE
+_ATTRIBUTE_SYMBOL.fields_by_name['name'].message_type = _REFERENCE
+_ATTRIBUTE_SYMBOL.containing_type = _ATTRIBUTE
+_ATTRIBUTE.fields_by_name['symbol'].message_type = _ATTRIBUTE_SYMBOL
+_ATTRIBUTE_FORMATFLAGS.containing_type = _ATTRIBUTE
+_STYLE_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_STYLE_ENTRY.fields_by_name['key'].message_type = _REFERENCE
+_STYLE_ENTRY.fields_by_name['item'].message_type = _ITEM
+_STYLE_ENTRY.containing_type = _STYLE
+_STYLE.fields_by_name['parent'].message_type = _REFERENCE
+_STYLE.fields_by_name['parent_source'].message_type = _SOURCE
+_STYLE.fields_by_name['entry'].message_type = _STYLE_ENTRY
+_STYLEABLE_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_STYLEABLE_ENTRY.fields_by_name['attr'].message_type = _REFERENCE
+_STYLEABLE_ENTRY.containing_type = _STYLEABLE
+_STYLEABLE.fields_by_name['entry'].message_type = _STYLEABLE_ENTRY
+_ARRAY_ELEMENT.fields_by_name['source'].message_type = _SOURCE
+_ARRAY_ELEMENT.fields_by_name['item'].message_type = _ITEM
+_ARRAY_ELEMENT.containing_type = _ARRAY
+_ARRAY.fields_by_name['element'].message_type = _ARRAY_ELEMENT
+_PLURAL_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_PLURAL_ENTRY.fields_by_name['arity'].enum_type = _PLURAL_ARITY
+_PLURAL_ENTRY.fields_by_name['item'].message_type = _ITEM
+_PLURAL_ENTRY.containing_type = _PLURAL
+_PLURAL.fields_by_name['entry'].message_type = _PLURAL_ENTRY
+_PLURAL_ARITY.containing_type = _PLURAL
+_XMLNODE.fields_by_name['element'].message_type = _XMLELEMENT
+_XMLNODE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLNODE.oneofs_by_name['node'].fields.append(
+  _XMLNODE.fields_by_name['element'])
+_XMLNODE.fields_by_name['element'].containing_oneof = _XMLNODE.oneofs_by_name['node']
+_XMLNODE.oneofs_by_name['node'].fields.append(
+  _XMLNODE.fields_by_name['text'])
+_XMLNODE.fields_by_name['text'].containing_oneof = _XMLNODE.oneofs_by_name['node']
+_XMLELEMENT.fields_by_name['namespace_declaration'].message_type = _XMLNAMESPACE
+_XMLELEMENT.fields_by_name['attribute'].message_type = _XMLATTRIBUTE
+_XMLELEMENT.fields_by_name['child'].message_type = _XMLNODE
+_XMLNAMESPACE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLATTRIBUTE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLATTRIBUTE.fields_by_name['compiled_item'].message_type = _ITEM
+DESCRIPTOR.message_types_by_name['StringPool'] = _STRINGPOOL
+DESCRIPTOR.message_types_by_name['SourcePosition'] = _SOURCEPOSITION
+DESCRIPTOR.message_types_by_name['Source'] = _SOURCE
+DESCRIPTOR.message_types_by_name['ToolFingerprint'] = _TOOLFINGERPRINT
+DESCRIPTOR.message_types_by_name['ResourceTable'] = _RESOURCETABLE
+DESCRIPTOR.message_types_by_name['PackageId'] = _PACKAGEID
+DESCRIPTOR.message_types_by_name['Package'] = _PACKAGE
+DESCRIPTOR.message_types_by_name['TypeId'] = _TYPEID
+DESCRIPTOR.message_types_by_name['Type'] = _TYPE
+DESCRIPTOR.message_types_by_name['Visibility'] = _VISIBILITY
+DESCRIPTOR.message_types_by_name['AllowNew'] = _ALLOWNEW
+DESCRIPTOR.message_types_by_name['Overlayable'] = _OVERLAYABLE
+DESCRIPTOR.message_types_by_name['OverlayableItem'] = _OVERLAYABLEITEM
+DESCRIPTOR.message_types_by_name['EntryId'] = _ENTRYID
+DESCRIPTOR.message_types_by_name['Entry'] = _ENTRY
+DESCRIPTOR.message_types_by_name['ConfigValue'] = _CONFIGVALUE
+DESCRIPTOR.message_types_by_name['Value'] = _VALUE
+DESCRIPTOR.message_types_by_name['Item'] = _ITEM
+DESCRIPTOR.message_types_by_name['CompoundValue'] = _COMPOUNDVALUE
+DESCRIPTOR.message_types_by_name['Boolean'] = _BOOLEAN
+DESCRIPTOR.message_types_by_name['Reference'] = _REFERENCE
+DESCRIPTOR.message_types_by_name['Id'] = _ID
+DESCRIPTOR.message_types_by_name['String'] = _STRING
+DESCRIPTOR.message_types_by_name['RawString'] = _RAWSTRING
+DESCRIPTOR.message_types_by_name['StyledString'] = _STYLEDSTRING
+DESCRIPTOR.message_types_by_name['FileReference'] = _FILEREFERENCE
+DESCRIPTOR.message_types_by_name['Primitive'] = _PRIMITIVE
+DESCRIPTOR.message_types_by_name['Attribute'] = _ATTRIBUTE
+DESCRIPTOR.message_types_by_name['Style'] = _STYLE
+DESCRIPTOR.message_types_by_name['Styleable'] = _STYLEABLE
+DESCRIPTOR.message_types_by_name['Array'] = _ARRAY
+DESCRIPTOR.message_types_by_name['Plural'] = _PLURAL
+DESCRIPTOR.message_types_by_name['XmlNode'] = _XMLNODE
+DESCRIPTOR.message_types_by_name['XmlElement'] = _XMLELEMENT
+DESCRIPTOR.message_types_by_name['XmlNamespace'] = _XMLNAMESPACE
+DESCRIPTOR.message_types_by_name['XmlAttribute'] = _XMLATTRIBUTE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+StringPool = _reflection.GeneratedProtocolMessageType('StringPool', (_message.Message,), {
+  'DESCRIPTOR' : _STRINGPOOL,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.StringPool)
+  })
+_sym_db.RegisterMessage(StringPool)
+
+SourcePosition = _reflection.GeneratedProtocolMessageType('SourcePosition', (_message.Message,), {
+  'DESCRIPTOR' : _SOURCEPOSITION,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.SourcePosition)
+  })
+_sym_db.RegisterMessage(SourcePosition)
+
+Source = _reflection.GeneratedProtocolMessageType('Source', (_message.Message,), {
+  'DESCRIPTOR' : _SOURCE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Source)
+  })
+_sym_db.RegisterMessage(Source)
+
+ToolFingerprint = _reflection.GeneratedProtocolMessageType('ToolFingerprint', (_message.Message,), {
+  'DESCRIPTOR' : _TOOLFINGERPRINT,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.ToolFingerprint)
+  })
+_sym_db.RegisterMessage(ToolFingerprint)
+
+ResourceTable = _reflection.GeneratedProtocolMessageType('ResourceTable', (_message.Message,), {
+  'DESCRIPTOR' : _RESOURCETABLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.ResourceTable)
+  })
+_sym_db.RegisterMessage(ResourceTable)
+
+PackageId = _reflection.GeneratedProtocolMessageType('PackageId', (_message.Message,), {
+  'DESCRIPTOR' : _PACKAGEID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.PackageId)
+  })
+_sym_db.RegisterMessage(PackageId)
+
+Package = _reflection.GeneratedProtocolMessageType('Package', (_message.Message,), {
+  'DESCRIPTOR' : _PACKAGE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Package)
+  })
+_sym_db.RegisterMessage(Package)
+
+TypeId = _reflection.GeneratedProtocolMessageType('TypeId', (_message.Message,), {
+  'DESCRIPTOR' : _TYPEID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.TypeId)
+  })
+_sym_db.RegisterMessage(TypeId)
+
+Type = _reflection.GeneratedProtocolMessageType('Type', (_message.Message,), {
+  'DESCRIPTOR' : _TYPE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Type)
+  })
+_sym_db.RegisterMessage(Type)
+
+Visibility = _reflection.GeneratedProtocolMessageType('Visibility', (_message.Message,), {
+  'DESCRIPTOR' : _VISIBILITY,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Visibility)
+  })
+_sym_db.RegisterMessage(Visibility)
+
+AllowNew = _reflection.GeneratedProtocolMessageType('AllowNew', (_message.Message,), {
+  'DESCRIPTOR' : _ALLOWNEW,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.AllowNew)
+  })
+_sym_db.RegisterMessage(AllowNew)
+
+Overlayable = _reflection.GeneratedProtocolMessageType('Overlayable', (_message.Message,), {
+  'DESCRIPTOR' : _OVERLAYABLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Overlayable)
+  })
+_sym_db.RegisterMessage(Overlayable)
+
+OverlayableItem = _reflection.GeneratedProtocolMessageType('OverlayableItem', (_message.Message,), {
+  'DESCRIPTOR' : _OVERLAYABLEITEM,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.OverlayableItem)
+  })
+_sym_db.RegisterMessage(OverlayableItem)
+
+EntryId = _reflection.GeneratedProtocolMessageType('EntryId', (_message.Message,), {
+  'DESCRIPTOR' : _ENTRYID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.EntryId)
+  })
+_sym_db.RegisterMessage(EntryId)
+
+Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+  'DESCRIPTOR' : _ENTRY,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Entry)
+  })
+_sym_db.RegisterMessage(Entry)
+
+ConfigValue = _reflection.GeneratedProtocolMessageType('ConfigValue', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGVALUE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.ConfigValue)
+  })
+_sym_db.RegisterMessage(ConfigValue)
+
+Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
+  'DESCRIPTOR' : _VALUE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Value)
+  })
+_sym_db.RegisterMessage(Value)
+
+Item = _reflection.GeneratedProtocolMessageType('Item', (_message.Message,), {
+  'DESCRIPTOR' : _ITEM,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Item)
+  })
+_sym_db.RegisterMessage(Item)
+
+CompoundValue = _reflection.GeneratedProtocolMessageType('CompoundValue', (_message.Message,), {
+  'DESCRIPTOR' : _COMPOUNDVALUE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.CompoundValue)
+  })
+_sym_db.RegisterMessage(CompoundValue)
+
+Boolean = _reflection.GeneratedProtocolMessageType('Boolean', (_message.Message,), {
+  'DESCRIPTOR' : _BOOLEAN,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Boolean)
+  })
+_sym_db.RegisterMessage(Boolean)
+
+Reference = _reflection.GeneratedProtocolMessageType('Reference', (_message.Message,), {
+  'DESCRIPTOR' : _REFERENCE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Reference)
+  })
+_sym_db.RegisterMessage(Reference)
+
+Id = _reflection.GeneratedProtocolMessageType('Id', (_message.Message,), {
+  'DESCRIPTOR' : _ID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Id)
+  })
+_sym_db.RegisterMessage(Id)
+
+String = _reflection.GeneratedProtocolMessageType('String', (_message.Message,), {
+  'DESCRIPTOR' : _STRING,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.String)
+  })
+_sym_db.RegisterMessage(String)
+
+RawString = _reflection.GeneratedProtocolMessageType('RawString', (_message.Message,), {
+  'DESCRIPTOR' : _RAWSTRING,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.RawString)
+  })
+_sym_db.RegisterMessage(RawString)
+
+StyledString = _reflection.GeneratedProtocolMessageType('StyledString', (_message.Message,), {
+
+  'Span' : _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), {
+    'DESCRIPTOR' : _STYLEDSTRING_SPAN,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.StyledString.Span)
+    })
+  ,
+  'DESCRIPTOR' : _STYLEDSTRING,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.StyledString)
+  })
+_sym_db.RegisterMessage(StyledString)
+_sym_db.RegisterMessage(StyledString.Span)
+
+FileReference = _reflection.GeneratedProtocolMessageType('FileReference', (_message.Message,), {
+  'DESCRIPTOR' : _FILEREFERENCE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.FileReference)
+  })
+_sym_db.RegisterMessage(FileReference)
+
+Primitive = _reflection.GeneratedProtocolMessageType('Primitive', (_message.Message,), {
+
+  'NullType' : _reflection.GeneratedProtocolMessageType('NullType', (_message.Message,), {
+    'DESCRIPTOR' : _PRIMITIVE_NULLTYPE,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.NullType)
+    })
+  ,
+
+  'EmptyType' : _reflection.GeneratedProtocolMessageType('EmptyType', (_message.Message,), {
+    'DESCRIPTOR' : _PRIMITIVE_EMPTYTYPE,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.EmptyType)
+    })
+  ,
+  'DESCRIPTOR' : _PRIMITIVE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Primitive)
+  })
+_sym_db.RegisterMessage(Primitive)
+_sym_db.RegisterMessage(Primitive.NullType)
+_sym_db.RegisterMessage(Primitive.EmptyType)
+
+Attribute = _reflection.GeneratedProtocolMessageType('Attribute', (_message.Message,), {
+
+  'Symbol' : _reflection.GeneratedProtocolMessageType('Symbol', (_message.Message,), {
+    'DESCRIPTOR' : _ATTRIBUTE_SYMBOL,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Attribute.Symbol)
+    })
+  ,
+  'DESCRIPTOR' : _ATTRIBUTE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Attribute)
+  })
+_sym_db.RegisterMessage(Attribute)
+_sym_db.RegisterMessage(Attribute.Symbol)
+
+Style = _reflection.GeneratedProtocolMessageType('Style', (_message.Message,), {
+
+  'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+    'DESCRIPTOR' : _STYLE_ENTRY,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Style.Entry)
+    })
+  ,
+  'DESCRIPTOR' : _STYLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Style)
+  })
+_sym_db.RegisterMessage(Style)
+_sym_db.RegisterMessage(Style.Entry)
+
+Styleable = _reflection.GeneratedProtocolMessageType('Styleable', (_message.Message,), {
+
+  'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+    'DESCRIPTOR' : _STYLEABLE_ENTRY,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Styleable.Entry)
+    })
+  ,
+  'DESCRIPTOR' : _STYLEABLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Styleable)
+  })
+_sym_db.RegisterMessage(Styleable)
+_sym_db.RegisterMessage(Styleable.Entry)
+
+Array = _reflection.GeneratedProtocolMessageType('Array', (_message.Message,), {
+
+  'Element' : _reflection.GeneratedProtocolMessageType('Element', (_message.Message,), {
+    'DESCRIPTOR' : _ARRAY_ELEMENT,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Array.Element)
+    })
+  ,
+  'DESCRIPTOR' : _ARRAY,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Array)
+  })
+_sym_db.RegisterMessage(Array)
+_sym_db.RegisterMessage(Array.Element)
+
+Plural = _reflection.GeneratedProtocolMessageType('Plural', (_message.Message,), {
+
+  'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+    'DESCRIPTOR' : _PLURAL_ENTRY,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Plural.Entry)
+    })
+  ,
+  'DESCRIPTOR' : _PLURAL,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Plural)
+  })
+_sym_db.RegisterMessage(Plural)
+_sym_db.RegisterMessage(Plural.Entry)
+
+XmlNode = _reflection.GeneratedProtocolMessageType('XmlNode', (_message.Message,), {
+  'DESCRIPTOR' : _XMLNODE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlNode)
+  })
+_sym_db.RegisterMessage(XmlNode)
+
+XmlElement = _reflection.GeneratedProtocolMessageType('XmlElement', (_message.Message,), {
+  'DESCRIPTOR' : _XMLELEMENT,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlElement)
+  })
+_sym_db.RegisterMessage(XmlElement)
+
+XmlNamespace = _reflection.GeneratedProtocolMessageType('XmlNamespace', (_message.Message,), {
+  'DESCRIPTOR' : _XMLNAMESPACE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlNamespace)
+  })
+_sym_db.RegisterMessage(XmlNamespace)
+
+XmlAttribute = _reflection.GeneratedProtocolMessageType('XmlAttribute', (_message.Message,), {
+  'DESCRIPTOR' : _XMLATTRIBUTE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlAttribute)
+  })
+_sym_db.RegisterMessage(XmlAttribute)
+
+
+DESCRIPTOR._options = None
+_PRIMITIVE.fields_by_name['dimension_value_deprecated']._options = None
+_PRIMITIVE.fields_by_name['fraction_value_deprecated']._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/src/build/android/gyp/proto/__init__.py b/src/build/android/gyp/proto/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/gyp/proto/__init__.py
diff --git a/src/build/android/gyp/resources_shrinker/BUILD.gn b/src/build/android/gyp/resources_shrinker/BUILD.gn
new file mode 100644
index 0000000..e6381e1
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/BUILD.gn
@@ -0,0 +1,15 @@
+import("//build/config/android/rules.gni")
+
+java_binary("resources_shrinker") {
+  sources = [ "//build/android/gyp/resources_shrinker/Shrinker.java" ]
+  main_class = "build.android.gyp.resources_shrinker.Shrinker"
+  deps = [
+    "//third_party/android_deps:com_android_tools_common_java",
+    "//third_party/android_deps:com_android_tools_layoutlib_layoutlib_api_java",
+    "//third_party/android_deps:com_android_tools_sdk_common_java",
+    "//third_party/android_deps:com_google_guava_guava_java",
+    "//third_party/android_deps:org_jetbrains_kotlin_kotlin_stdlib_java",
+    "//third_party/r8:r8_java",
+  ]
+  wrapper_script_name = "helper/resources_shrinker"
+}
diff --git a/src/build/android/gyp/resources_shrinker/Shrinker.java b/src/build/android/gyp/resources_shrinker/Shrinker.java
new file mode 100644
index 0000000..50e2f93
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/Shrinker.java
@@ -0,0 +1,599 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Modifications are owned by the Chromium Authors.
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package build.android.gyp.resources_shrinker;
+
+import static com.android.ide.common.symbols.SymbolIo.readFromAapt;
+import static com.android.utils.SdkUtils.endsWithIgnoreCase;
+import static com.google.common.base.Charsets.UTF_8;
+
+import com.android.ide.common.resources.usage.ResourceUsageModel;
+import com.android.ide.common.resources.usage.ResourceUsageModel.Resource;
+import com.android.ide.common.symbols.Symbol;
+import com.android.ide.common.symbols.SymbolTable;
+import com.android.resources.ResourceFolderType;
+import com.android.resources.ResourceType;
+import com.android.tools.r8.CompilationFailedException;
+import com.android.tools.r8.ProgramResource;
+import com.android.tools.r8.ProgramResourceProvider;
+import com.android.tools.r8.ResourceShrinker;
+import com.android.tools.r8.ResourceShrinker.Command;
+import com.android.tools.r8.ResourceShrinker.ReferenceChecker;
+import com.android.tools.r8.origin.PathOrigin;
+import com.android.utils.XmlUtils;
+import com.google.common.base.Charsets;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Maps;
+import com.google.common.io.ByteStreams;
+import com.google.common.io.Closeables;
+import com.google.common.io.Files;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.xml.sax.SAXException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+import javax.xml.parsers.ParserConfigurationException;
+
+/**
+  Copied with modifications from gradle core source
+  https://android.googlesource.com/platform/tools/base/+/master/build-system/gradle-core/src/main/groovy/com/android/build/gradle/tasks/ResourceUsageAnalyzer.java
+
+  Modifications are mostly to:
+    - Remove unused code paths to reduce complexity.
+    - Reduce dependencies unless absolutely required.
+*/
+
+public class Shrinker {
+    private static final String ANDROID_RES = "android_res/";
+    private static final String DOT_DEX = ".dex";
+    private static final String DOT_CLASS = ".class";
+    private static final String DOT_XML = ".xml";
+    private static final String DOT_JAR = ".jar";
+    private static final String FN_RESOURCE_TEXT = "R.txt";
+
+    /* A source of resource classes to track; can be either a folder or a jar. */
+    private final Iterable<File> mRTxtFiles;
+    private final File mProguardMapping;
+    /** These can be class or dex files. */
+    private final Iterable<File> mClasses;
+    private final Iterable<File> mManifests;
+    private final Iterable<File> mResourceDirs;
+
+    private final File mReportFile;
+    private final StringWriter mDebugOutput;
+    private final PrintWriter mDebugPrinter;
+
+    /** Easy way to invoke more verbose output for debugging */
+    private boolean mDebug = false;
+
+    /** The computed set of unused resources */
+    private List<Resource> mUnused;
+
+    /**
+     * Map from resource class owners (VM format class) to corresponding resource entries.
+     * This lets us map from code references (obfuscated class and possibly obfuscated field
+     * reference) back to the corresponding resource type and name.
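+     * For example (hypothetical entry): "com/example/R$string" maps to
+     * (ResourceType.STRING, {"a" -> "app_name"}).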
+     */
+    private Map<String, Pair<ResourceType, Map<String, String>>> mResourceObfuscation =
+            Maps.newHashMapWithExpectedSize(30);
+
+    /** Obfuscated name of android/support/v7/widget/SuggestionsAdapter.java */
+    private String mSuggestionsAdapter;
+
+    /** Obfuscated name of android/support/v7/internal/widget/ResourcesWrapper.java */
+    private String mResourcesWrapper;
+
+    /* A Pair class because java does not come with batteries included. */
+    private static class Pair<U, V> {
+        private U mFirst;
+        private V mSecond;
+
+        Pair(U first, V second) {
+            this.mFirst = first;
+            this.mSecond = second;
+        }
+
+        public U getFirst() {
+            return mFirst;
+        }
+
+        public V getSecond() {
+            return mSecond;
+        }
+    }
+
+    public Shrinker(Iterable<File> rTxtFiles, Iterable<File> classes, Iterable<File> manifests,
+            File mapping, Iterable<File> resources, File reportFile) {
+        mRTxtFiles = rTxtFiles;
+        mProguardMapping = mapping;
+        mClasses = classes;
+        mManifests = manifests;
+        mResourceDirs = resources;
+
+        mReportFile = reportFile;
+        if (reportFile != null) {
+            mDebugOutput = new StringWriter(8 * 1024);
+            mDebugPrinter = new PrintWriter(mDebugOutput);
+        } else {
+            mDebugOutput = null;
+            mDebugPrinter = null;
+        }
+    }
+
+    public void close() {
+        if (mDebugOutput != null) {
+            String output = mDebugOutput.toString();
+
+            if (mReportFile != null) {
+                File dir = mReportFile.getParentFile();
+                if (dir != null) {
+                    if ((dir.exists() || dir.mkdir()) && dir.canWrite()) {
+                        try {
+                            Files.asCharSink(mReportFile, Charsets.UTF_8).write(output);
+                        } catch (IOException ignore) {
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    public void analyze() throws IOException, ParserConfigurationException, SAXException {
+        gatherResourceValues(mRTxtFiles);
+        recordMapping(mProguardMapping);
+
+        for (File jarOrDir : mClasses) {
+            recordClassUsages(jarOrDir);
+        }
+        recordManifestUsages(mManifests);
+        recordResources(mResourceDirs);
+        dumpReferences();
+        mModel.processToolsAttributes();
+        mUnused = mModel.findUnused();
+    }
+
+    public void emitConfig(Path destination) throws IOException {
+        File destinationFile = destination.toFile();
+        if (!destinationFile.exists()) {
+            destinationFile.getParentFile().mkdirs();
+            boolean success = destinationFile.createNewFile();
+            if (!success) {
+                throw new IOException("Could not create " + destination);
+            }
+        }
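+        // Each unused resource becomes a line of the form "type/name#remove",
+        // e.g. "drawable/unused_icon#remove" (hypothetical resource name).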
+        StringBuilder sb = new StringBuilder();
+        Collections.sort(mUnused);
+        for (Resource resource : mUnused) {
+            sb.append(resource.type + "/" + resource.name + "#remove\n");
+        }
+        Files.asCharSink(destinationFile, UTF_8).write(sb.toString());
+    }
+
+    private void dumpReferences() {
+        if (mDebugPrinter != null) {
+            mDebugPrinter.print(mModel.dumpReferences());
+        }
+    }
+
+    private void recordResources(Iterable<File> resources)
+            throws IOException, SAXException, ParserConfigurationException {
+        for (File resDir : resources) {
+            File[] resourceFolders = resDir.listFiles();
+            if (resourceFolders != null) {
+                for (File folder : resourceFolders) {
+                    ResourceFolderType folderType =
+                            ResourceFolderType.getFolderType(folder.getName());
+                    if (folderType != null) {
+                        recordResources(folderType, folder);
+                    }
+                }
+            }
+        }
+    }
+
+    private void recordResources(ResourceFolderType folderType, File folder)
+            throws ParserConfigurationException, SAXException, IOException {
+        File[] files = folder.listFiles();
+        if (files != null) {
+            for (File file : files) {
+                String path = file.getPath();
+                mModel.file = file;
+                try {
+                    boolean isXml = endsWithIgnoreCase(path, DOT_XML);
+                    if (isXml) {
+                        String xml = Files.toString(file, UTF_8);
+                        Document document = XmlUtils.parseDocument(xml, true);
+                        mModel.visitXmlDocument(file, folderType, document);
+                    } else {
+                        mModel.visitBinaryResource(folderType, file);
+                    }
+                } finally {
+                    mModel.file = null;
+                }
+            }
+        }
+    }
+
+    void recordMapping(File mapping) throws IOException {
+        if (mapping == null || !mapping.exists()) {
+            return;
+        }
+        final String arrowString = " -> ";
+        final String resourceString = ".R$";
+        Map<String, String> nameMap = null;
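+        // Illustrative (hypothetical) proguard mapping lines handled by this loop:
+        //   com.example.app.R$string -> com.example.a:
+        //       int app_name -> a
+        // The class line seeds nameMap; the member line then records "a" -> "app_name".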
+        for (String line : Files.readLines(mapping, UTF_8)) {
+            if (line.startsWith(" ") || line.startsWith("\t")) {
+                if (nameMap != null) {
+                    // We're processing the members of a resource class: record names into the map
+                    int n = line.length();
+                    int i = 0;
+                    for (; i < n; i++) {
+                        if (!Character.isWhitespace(line.charAt(i))) {
+                            break;
+                        }
+                    }
+                    if (i < n && line.startsWith("int", i)) { // int or int[]
+                        int start = line.indexOf(' ', i + 3) + 1;
+                        int arrow = line.indexOf(arrowString);
+                        if (start > 0 && arrow != -1) {
+                            int end = line.indexOf(' ', start + 1);
+                            if (end != -1) {
+                                String oldName = line.substring(start, end);
+                                String newName =
+                                        line.substring(arrow + arrowString.length()).trim();
+                                if (!newName.equals(oldName)) {
+                                    nameMap.put(newName, oldName);
+                                }
+                            }
+                        }
+                    }
+                }
+                continue;
+            } else {
+                nameMap = null;
+            }
+            int index = line.indexOf(resourceString);
+            if (index == -1) {
+                // Record obfuscated names of a few known appcompat usages of
+                // Resources#getIdentifier that are unlikely to be used for general
+                // resource name reflection
+                if (line.startsWith("android.support.v7.widget.SuggestionsAdapter ")) {
+                    mSuggestionsAdapter =
+                            line.substring(line.indexOf(arrowString) + arrowString.length(),
+                                        line.indexOf(':') != -1 ? line.indexOf(':') : line.length())
+                                    .trim()
+                                    .replace('.', '/')
+                            + DOT_CLASS;
+                } else if (line.startsWith("android.support.v7.internal.widget.ResourcesWrapper ")
+                        || line.startsWith("android.support.v7.widget.ResourcesWrapper ")
+                        || (mResourcesWrapper == null // Recently wrapper moved
+                                && line.startsWith(
+                                        "android.support.v7.widget.TintContextWrapper$TintResources "))) {
+                    mResourcesWrapper =
+                            line.substring(line.indexOf(arrowString) + arrowString.length(),
+                                        line.indexOf(':') != -1 ? line.indexOf(':') : line.length())
+                                    .trim()
+                                    .replace('.', '/')
+                            + DOT_CLASS;
+                }
+                continue;
+            }
+            int arrow = line.indexOf(arrowString, index + 3);
+            if (arrow == -1) {
+                continue;
+            }
+            String typeName = line.substring(index + resourceString.length(), arrow);
+            ResourceType type = ResourceType.fromClassName(typeName);
+            if (type == null) {
+                continue;
+            }
+            int end = line.indexOf(':', arrow + arrowString.length());
+            if (end == -1) {
+                end = line.length();
+            }
+            String target = line.substring(arrow + arrowString.length(), end).trim();
+            String ownerName = target.replace('.', '/');
+
+            nameMap = Maps.newHashMap();
+            Pair<ResourceType, Map<String, String>> pair = new Pair<>(type, nameMap);
+            mResourceObfuscation.put(ownerName, pair);
+            // For fast lookup in isResourceClass
+            mResourceObfuscation.put(ownerName + DOT_CLASS, pair);
+        }
+    }
+
+    private void recordManifestUsages(File manifest)
+            throws IOException, ParserConfigurationException, SAXException {
+        String xml = Files.toString(manifest, UTF_8);
+        Document document = XmlUtils.parseDocument(xml, true);
+        mModel.visitXmlDocument(manifest, null, document);
+    }
+
+    private void recordManifestUsages(Iterable<File> manifests)
+            throws IOException, ParserConfigurationException, SAXException {
+        for (File manifest : manifests) {
+            recordManifestUsages(manifest);
+        }
+    }
+
+    private void recordClassUsages(File file) throws IOException {
+        assert file.isFile();
+        if (file.getPath().endsWith(DOT_DEX)) {
+            byte[] bytes = Files.toByteArray(file);
+            recordClassUsages(file, file.getName(), bytes);
+        } else if (file.getPath().endsWith(DOT_JAR)) {
+            ZipInputStream zis = null;
+            try {
+                FileInputStream fis = new FileInputStream(file);
+                try {
+                    zis = new ZipInputStream(fis);
+                    ZipEntry entry = zis.getNextEntry();
+                    while (entry != null) {
+                        String name = entry.getName();
+                        if (name.endsWith(DOT_DEX)) {
+                            byte[] bytes = ByteStreams.toByteArray(zis);
+                            if (bytes != null) {
+                                recordClassUsages(file, name, bytes);
+                            }
+                        }
+
+                        entry = zis.getNextEntry();
+                    }
+                } finally {
+                    Closeables.close(fis, true);
+                }
+            } finally {
+                Closeables.close(zis, true);
+            }
+        }
+    }
+
+    private void recordClassUsages(File file, String name, byte[] bytes) {
+        assert name.endsWith(DOT_DEX);
+        ReferenceChecker callback = new ReferenceChecker() {
+            @Override
+            public boolean shouldProcess(String internalName) {
+                return !isResourceClass(internalName + DOT_CLASS);
+            }
+
+            @Override
+            public void referencedInt(int value) {
+                Shrinker.this.referencedInt("dex", value, file, name);
+            }
+
+            @Override
+            public void referencedString(String value) {
+                // do nothing.
+            }
+
+            @Override
+            public void referencedStaticField(String internalName, String fieldName) {
+                Resource resource = getResourceFromCode(internalName, fieldName);
+                if (resource != null) {
+                    ResourceUsageModel.markReachable(resource);
+                }
+            }
+
+            @Override
+            public void referencedMethod(
+                    String internalName, String methodName, String methodDescriptor) {
+                // Do nothing.
+            }
+        };
+        ProgramResource resource = ProgramResource.fromBytes(
+                new PathOrigin(file.toPath()), ProgramResource.Kind.DEX, bytes, null);
+        ProgramResourceProvider provider = () -> Arrays.asList(resource);
+        try {
+            Command command =
+                    (new ResourceShrinker.Builder()).addProgramResourceProvider(provider).build();
+            ResourceShrinker.run(command, callback);
+        } catch (CompilationFailedException | IOException | ExecutionException e) {
+            e.printStackTrace();
+        }
+    }
+
+    /** Returns whether the given class file name points to an aapt-generated compiled R class. */
+    boolean isResourceClass(String name) {
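+        // e.g. "com/example/R$string.class" (hypothetical) would match; a plain
+        // "com/example/Foo.class" would not.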
+        if (mResourceObfuscation.containsKey(name)) {
+            return true;
+        }
+        int index = name.lastIndexOf('/');
+        if (index != -1 && name.startsWith("R$", index + 1) && name.endsWith(DOT_CLASS)) {
+            String typeName = name.substring(index + 3, name.length() - DOT_CLASS.length());
+            return ResourceType.fromClassName(typeName) != null;
+        }
+        return false;
+    }
+
+    Resource getResourceFromCode(String owner, String name) {
+        Pair<ResourceType, Map<String, String>> pair = mResourceObfuscation.get(owner);
+        if (pair != null) {
+            ResourceType type = pair.getFirst();
+            Map<String, String> nameMap = pair.getSecond();
+            String renamedField = nameMap.get(name);
+            if (renamedField != null) {
+                name = renamedField;
+            }
+            return mModel.getResource(type, name);
+        }
+        if (isValidResourceType(owner)) {
+            ResourceType type =
+                    ResourceType.fromClassName(owner.substring(owner.lastIndexOf('$') + 1));
+            if (type != null) {
+                return mModel.getResource(type, name);
+            }
+        }
+        return null;
+    }
+
+    private boolean isValidResourceType(String candidateString) {
+        return candidateString.contains("/")
+                && candidateString.substring(candidateString.lastIndexOf('/') + 1).contains("$");
+    }
+
+    private void gatherResourceValues(Iterable<File> rTxts) throws IOException {
+        for (File rTxt : rTxts) {
+            assert rTxt.isFile();
+            assert rTxt.getName().endsWith(FN_RESOURCE_TEXT);
+            addResourcesFromRTxtFile(rTxt);
+        }
+    }
+
+    private void addResourcesFromRTxtFile(File file) {
+        try {
+            SymbolTable st = readFromAapt(file, null);
+            for (Symbol symbol : st.getSymbols().values()) {
+                String symbolValue = symbol.getValue();
+                if (symbol.getResourceType() == ResourceType.STYLEABLE) {
+                    if (symbolValue.trim().startsWith("{")) {
+                        // Only add the styleable parent; styleable children are not yet supported.
+                        mModel.addResource(symbol.getResourceType(), symbol.getName(), null);
+                    }
+                } else {
+                    mModel.addResource(symbol.getResourceType(), symbol.getName(), symbolValue);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    ResourceUsageModel getModel() {
+        return mModel;
+    }
+
+    private void referencedInt(String context, int value, File file, String currentClass) {
+        Resource resource = mModel.getResource(value);
+        if (ResourceUsageModel.markReachable(resource) && mDebug) {
+            assert mDebugPrinter != null : "mDebug is true, but mDebugPrinter is null.";
+            mDebugPrinter.println("Marking " + resource + " reachable: referenced from " + context
+                    + " in " + file + ":" + currentClass);
+        }
+    }
+
+    private final ResourceShrinkerUsageModel mModel = new ResourceShrinkerUsageModel();
+
+    private class ResourceShrinkerUsageModel extends ResourceUsageModel {
+        public File file;
+
+        /**
+         * Whether we should ignore tools attribute resource references.
+         * <p>
+         * For example, for resource shrinking we want to ignore tools attributes,
+         * whereas for resource refactoring on the source code we do not.
+         *
+         * @return whether tools attributes should be ignored
+         */
+        @Override
+        protected boolean ignoreToolsAttributes() {
+            return true;
+        }
+
+        @Override
+        protected void onRootResourcesFound(List<Resource> roots) {
+            if (mDebugPrinter != null) {
+                mDebugPrinter.println(
+                        "\nThe root reachable resources are:\n" + Joiner.on(",\n   ").join(roots));
+            }
+        }
+
+        @Override
+        protected Resource declareResource(ResourceType type, String name, Node node) {
+            Resource resource = super.declareResource(type, name, node);
+            resource.addLocation(file);
+            return resource;
+        }
+
+        @Override
+        protected void referencedString(String string) {
+            // Do nothing
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        List<File> rTxtFiles = null; // R.txt files
+        List<File> classes = null; // Dex files, or jars containing dex files
+        List<File> manifests = null; // manifests
+        File mapping = null; // mapping
+        List<File> resources = null; // resource dirs
+        File log = null; // output log for debugging
+        Path configPath = null; // output config
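+        // Illustrative invocation (hypothetical paths; multi-value args are
+        // colon-separated):
+        //   Shrinker --rtxts out/R.txt --dex classes.dex --manifests AndroidManifest.xml
+        //       --resourceDirs res1:res2 --mapping mapping.txt --outputConfig unused.config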
+        for (int i = 0; i < args.length; i += 2) {
+            switch (args[i]) {
+                case "--rtxts":
+                    rTxtFiles = Arrays.stream(args[i + 1].split(":"))
+                                        .map(s -> new File(s))
+                                        .collect(Collectors.toList());
+                    break;
+                case "--dex":
+                    classes = Arrays.stream(args[i + 1].split(":"))
+                                      .map(s -> new File(s))
+                                      .collect(Collectors.toList());
+                    break;
+                case "--manifests":
+                    manifests = Arrays.stream(args[i + 1].split(":"))
+                                        .map(s -> new File(s))
+                                        .collect(Collectors.toList());
+                    break;
+                case "--mapping":
+                    mapping = new File(args[i + 1]);
+                    break;
+                case "--resourceDirs":
+                    resources = Arrays.stream(args[i + 1].split(":"))
+                                        .map(s -> new File(s))
+                                        .collect(Collectors.toList());
+                    break;
+                case "--log":
+                    log = new File(args[i + 1]);
+                    break;
+                case "--outputConfig":
+                    configPath = Paths.get(args[i + 1]);
+                    break;
+                default:
+                    throw new IllegalArgumentException(args[i] + " is not a valid arg.");
+            }
+        }
+        Shrinker shrinker = new Shrinker(rTxtFiles, classes, manifests, mapping, resources, log);
+        shrinker.analyze();
+        shrinker.close();
+        shrinker.emitConfig(configPath);
+    }
+}
diff --git a/src/build/android/gyp/resources_shrinker/shrinker.py b/src/build/android/gyp/resources_shrinker/shrinker.py
new file mode 100755
index 0000000..2800ce2
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/shrinker.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+# Copyright (c) 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+from util import resource_utils
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--script',
+                      required=True,
+                      help='Path to the unused resources detector script.')
+  parser.add_argument(
+      '--dependencies-res-zips',
+      required=True,
+      help='Resources zip archives to investigate for unused resources.')
+  parser.add_argument('--dex',
+                      required=True,
+                      help='Path to dex file, or zip with dex files.')
+  parser.add_argument(
+      '--proguard-mapping',
+      required=True,
+      help='Path to proguard mapping file for the optimized dex.')
+  parser.add_argument('--r-text', required=True, help='Path to R.txt')
+  parser.add_argument('--android-manifest',
+                      required=True,
+                      help='Path to AndroidManifest')
+  parser.add_argument('--output-config',
+                      required=True,
+                      help='Path to output the aapt2 config to.')
+  args = build_utils.ExpandFileArgs(args)
+  options = parser.parse_args(args)
+  options.dependencies_res_zips = (build_utils.ParseGnList(
+      options.dependencies_res_zips))
+
+  # In case of no resources, short-circuit early.
+  if not options.dependencies_res_zips:
+    build_utils.Touch(options.output_config)
+    return
+
+  with build_utils.TempDir() as temp_dir:
+    dep_subdirs = []
+    for dependency_res_zip in options.dependencies_res_zips:
+      dep_subdirs += resource_utils.ExtractDeps([dependency_res_zip], temp_dir)
+
+    build_utils.CheckOutput([
+        options.script, '--rtxts', options.r_text, '--manifests',
+        options.android_manifest, '--resourceDirs', ':'.join(dep_subdirs),
+        '--dex', options.dex, '--mapping', options.proguard_mapping,
+        '--outputConfig', options.output_config
+    ])
+
+  if options.depfile:
+    depfile_deps = options.dependencies_res_zips + [
+        options.r_text,
+        options.android_manifest,
+        options.dex,
+        options.proguard_mapping,
+    ]
+    build_utils.WriteDepfile(options.depfile, options.output_config,
+                             depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/resources_shrinker/shrinker.pydeps b/src/build/android/gyp/resources_shrinker/shrinker.pydeps
new file mode 100644
index 0000000..92c8905
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/shrinker.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp/resources_shrinker --output build/android/gyp/resources_shrinker/shrinker.pydeps build/android/gyp/resources_shrinker/shrinker.py
+../../../../third_party/jinja2/__init__.py
+../../../../third_party/jinja2/_compat.py
+../../../../third_party/jinja2/asyncfilters.py
+../../../../third_party/jinja2/asyncsupport.py
+../../../../third_party/jinja2/bccache.py
+../../../../third_party/jinja2/compiler.py
+../../../../third_party/jinja2/defaults.py
+../../../../third_party/jinja2/environment.py
+../../../../third_party/jinja2/exceptions.py
+../../../../third_party/jinja2/filters.py
+../../../../third_party/jinja2/idtracking.py
+../../../../third_party/jinja2/lexer.py
+../../../../third_party/jinja2/loaders.py
+../../../../third_party/jinja2/nodes.py
+../../../../third_party/jinja2/optimizer.py
+../../../../third_party/jinja2/parser.py
+../../../../third_party/jinja2/runtime.py
+../../../../third_party/jinja2/tests.py
+../../../../third_party/jinja2/utils.py
+../../../../third_party/jinja2/visitor.py
+../../../../third_party/markupsafe/__init__.py
+../../../../third_party/markupsafe/_compat.py
+../../../../third_party/markupsafe/_native.py
+../../../gn_helpers.py
+../util/__init__.py
+../util/build_utils.py
+../util/resource_utils.py
+shrinker.py
diff --git a/src/build/android/gyp/test/BUILD.gn b/src/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000..301a220
--- /dev/null
+++ b/src/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,11 @@
+import("//build/config/android/rules.gni")
+
+java_library("hello_world_java") {
+  sources = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
+}
+
+java_binary("hello_world") {
+  deps = [ ":hello_world_java" ]
+  sources = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
+  main_class = "org.chromium.helloworld.HelloWorldMain"
+}
diff --git a/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000..10860d8
--- /dev/null
+++ b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+    public static void main(String[] args) {
+        if (args.length > 0) {
+            System.exit(Integer.parseInt(args[0]));
+        }
+        HelloWorldPrinter.print();
+    }
+}
+
diff --git a/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000..b09673e
--- /dev/null
+++ b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+    public static void print() {
+        System.out.println("Hello, world!");
+    }
+}
+
diff --git a/src/build/android/gyp/turbine.py b/src/build/android/gyp/turbine.py
new file mode 100755
index 0000000..208cc76
--- /dev/null
+++ b/src/build/android/gyp/turbine.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wraps the turbine jar and expands @FileArgs."""
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import time
+
+from util import build_utils
+
+
+def main(argv):
+  build_utils.InitLogging('TURBINE_DEBUG')
+  argv = build_utils.ExpandFileArgs(argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--turbine-jar-path', required=True, help='Path to the turbine jar file.')
+  parser.add_argument(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_argument(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If specified multiple times, the values '
+      'will all be appended to construct the classpath.')
+  parser.add_argument(
+      '--java-version',
+      help='Java language version to use in -source and -target args to javac.')
+  parser.add_argument('--classpath', action='append', help='Classpath to use.')
+  parser.add_argument(
+      '--processors',
+      action='append',
+      help='GN list of annotation processor main classes.')
+  parser.add_argument(
+      '--processorpath',
+      action='append',
+      help='GN list of jars that comprise the classpath used for Annotation '
+      'Processors.')
+  parser.add_argument(
+      '--processor-args',
+      action='append',
+      help='key=value arguments for the annotation processors.')
+  parser.add_argument('--jar-path', help='Jar output path.', required=True)
+  parser.add_argument(
+      '--generated-jar-path',
+      required=True,
+      help='Output path for generated source files.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  options, unknown_args = parser.parse_known_args(argv)
+
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.processorpath = build_utils.ParseGnList(options.processorpath)
+  options.processors = build_utils.ParseGnList(options.processors)
+  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+
+  files = []
+  for arg in unknown_args:
+    # Interpret a path prefixed with @ as a file containing a list of sources.
+    if arg.startswith('@'):
+      files.extend(build_utils.ReadSourcesList(arg[1:]))
+
+  cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
+      '-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main'
+  ]
+  javac_cmd = []
+
+  # Turbine reads lists from command line args by consuming args until one
+  # starts with double dash (--). Thus command line args should be grouped
+  # together and passed in together.
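+  # Illustrative grouping (hypothetical values):
+  #   ['--processors', 'com.foo.ProcA', 'com.foo.ProcB', '--classpath', 'a.jar']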
+  if options.processors:
+    cmd += ['--processors']
+    cmd += options.processors
+
+  if options.java_version:
+    javac_cmd.extend([
+        '-source',
+        options.java_version,
+        '-target',
+        options.java_version,
+    ])
+  if options.java_version == '1.8':
+    # Android's boot jar doesn't contain all java 8 classes.
+    options.bootclasspath.append(build_utils.RT_JAR_PATH)
+
+  if options.bootclasspath:
+    cmd += ['--bootclasspath']
+    for bootclasspath in options.bootclasspath:
+      cmd += bootclasspath.split(':')
+
+  if options.processorpath:
+    cmd += ['--processorpath']
+    cmd += options.processorpath
+
+  if options.processor_args:
+    for arg in options.processor_args:
+      javac_cmd.extend(['-A%s' % arg])
+
+  if options.classpath:
+    cmd += ['--classpath']
+    cmd += options.classpath
+
+  if options.java_srcjars:
+    cmd += ['--source_jars']
+    cmd += options.java_srcjars
+
+  if files:
+    # Use jar_path to ensure paths are relative (needed for goma).
+    files_rsp_path = options.jar_path + '.files_list.txt'
+    with open(files_rsp_path, 'w') as f:
+      f.write(' '.join(files))
+    # Pass source paths as response files to avoid extremely long command lines
+    # that are tedious to debug.
+    cmd += ['--sources']
+    cmd += ['@' + files_rsp_path]
+
+  if javac_cmd:
+    cmd.append('--javacopts')
+    cmd += javac_cmd
+    cmd.append('--')  # Terminate javacopts
+
+  # Use AtomicOutput so that output timestamps are not updated when outputs
+  # are not changed.
+  with build_utils.AtomicOutput(options.jar_path) as output_jar, \
+      build_utils.AtomicOutput(options.generated_jar_path) as generated_jar:
+    cmd += ['--output', output_jar.name, '--gensrc_output', generated_jar.name]
+    logging.debug('Command: %s', cmd)
+    start = time.time()
+    build_utils.CheckOutput(cmd,
+                            print_stdout=True,
+                            fail_on_output=options.warnings_as_errors)
+    elapsed = time.time() - start
+    logging.info('Header compilation took %ss', elapsed)
+
+  if options.depfile:
+    # GN already knows of the java files, so avoid listing individual java files
+    # in the depfile.
+    depfile_deps = (options.bootclasspath + options.classpath +
+                    options.processorpath + options.java_srcjars)
+    build_utils.WriteDepfile(options.depfile, options.jar_path, depfile_deps)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/turbine.pydeps b/src/build/android/gyp/turbine.pydeps
new file mode 100644
index 0000000..f0b2411
--- /dev/null
+++ b/src/build/android/gyp/turbine.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py
+../../gn_helpers.py
+turbine.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/util/__init__.py b/src/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/gyp/util/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/gyp/util/build_utils.py b/src/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000..d1d3a72
--- /dev/null
+++ b/src/build/android/gyp/util/build_utils.py
@@ -0,0 +1,724 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for GN action()s."""
+
+import atexit
+import collections
+import contextlib
+import filecmp
+import fnmatch
+import json
+import logging
+import os
+import pipes
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import time
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+                             os.pardir, os.pardir, os.pardir))
+import gn_helpers
+
+# Use relative paths to improve the hermetic property of build scripts.
+DIR_SOURCE_ROOT = os.path.relpath(
+    os.environ.get(
+        'CHECKOUT_SOURCE_ROOT',
+        os.path.join(
+            os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+            os.pardir)))
+JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
+JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac')
+JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap')
+RT_JAR_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'extras',
+                           'java_8', 'jre', 'lib', 'rt.jar')
+
+try:
+  string_types = basestring
+except NameError:
+  string_types = (str, bytes)
+
+
+def JavaCmd(verify=True, xmx='1G'):
+  ret = [os.path.join(JAVA_HOME, 'bin', 'java')]
+  # Limit heap to avoid Java not GC'ing when it should, and causing
+  # bots to OOM when many java commands are running at the same time
+  # https://crbug.com/1098333
+  ret += ['-Xmx' + xmx]
+
+  # Disabling bytecode verification for local builds gives a ~2% speed-up.
+  if not verify:
+    ret += ['-noverify']
+
+  return ret
+
+
+@contextlib.contextmanager
+def TempDir(**kwargs):
+  dirname = tempfile.mkdtemp(**kwargs)
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+  try:
+    os.makedirs(dir_path)
+  except OSError:
+    pass
+
+
+def DeleteDirectory(dir_path):
+  if os.path.exists(dir_path):
+    shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+  if fail_if_missing and not os.path.exists(path):
+    raise Exception(path + ' doesn\'t exist.')
+
+  MakeDirectory(os.path.dirname(path))
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter='*'):
+  files = []
+  for root, _dirnames, filenames in os.walk(directory):
+    matched_files = fnmatch.filter(filenames, filename_filter)
+    files.extend((os.path.join(root, f) for f in matched_files))
+  return files
+
+
+def ParseGnList(value):
+  """Converts a "GN-list" command-line parameter into a list.
+
+  Conversions handled:
+    * None -> []
+    * '' -> []
+    * 'asdf' -> ['asdf']
+    * '["a", "b"]' -> ['a', 'b']
+    * ['["a", "b"]', 'c'] -> ['a', 'b', 'c']  (flattened list)
+
+  The common use for this behavior is in the Android build where things can
+  take lists of @FileArg references that are expanded via ExpandFileArgs.
+  """
+  # Convert None to [].
+  if not value:
+    return []
+  # Convert a list of GN lists to a flattened list.
+  if isinstance(value, list):
+    ret = []
+    for arg in value:
+      ret.extend(ParseGnList(arg))
+    return ret
+  # Convert normal GN list.
+  if value.startswith('['):
+    return gn_helpers.GNValueParser(value).ParseList()
+  # Convert a single string value to a list.
+  return [value]
+
+
+def CheckOptions(options, parser, required=None):
+  if not required:
+    return
+  for option_name in required:
+    if getattr(options, option_name) is None:
+      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+  old_dump = None
+  if os.path.exists(path):
+    with open(path, 'r') as oldfile:
+      old_dump = oldfile.read()
+
+  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+  if not only_if_changed or old_dump != new_dump:
+    with open(path, 'w') as outfile:
+      outfile.write(new_dump)
+
+
+@contextlib.contextmanager
+def AtomicOutput(path, only_if_changed=True, mode='w+b'):
+  """Helper to prevent half-written outputs.
+
+  Args:
+    path: Path to the final output file, which will be written atomically.
+    only_if_changed: If True (the default), do not touch the filesystem
+      if the content has not changed.
+    mode: The mode to open the file in (str).
+  Returns:
+    A python context manager that yields a NamedTemporaryFile instance
+    that clients must write the data to. On exit, the
+    manager will try to replace the final output file with the
+    temporary one if necessary. The temporary file is always destroyed
+    on exit.
+  Example:
+    with build_utils.AtomicOutput(output_path) as tmp_file:
+      subprocess.check_call(['prog', '--output', tmp_file.name])
+  """
+  # Create in same directory to ensure same filesystem when moving.
+  dirname = os.path.dirname(path)
+  if not os.path.exists(dirname):
+    MakeDirectory(dirname)
+  with tempfile.NamedTemporaryFile(
+      mode, suffix=os.path.basename(path), dir=dirname, delete=False) as f:
+    try:
+      yield f
+
+      # file should be closed before comparison/move.
+      f.close()
+      if not (only_if_changed and os.path.exists(path) and
+              filecmp.cmp(f.name, path)):
+        shutil.move(f.name, path)
+    finally:
+      if os.path.exists(f.name):
+        os.unlink(f.name)
+
+
+class CalledProcessError(Exception):
+  """This exception is raised when the process run by CheckOutput
+  exits with a non-zero exit code."""
+
+  def __init__(self, cwd, args, output):
+    super(CalledProcessError, self).__init__()
+    self.cwd = cwd
+    self.args = args
+    self.output = output
+
+  def __str__(self):
+    # A user should be able to simply copy and paste the command that failed
+    # into their shell.
+    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+        ' '.join(map(pipes.quote, self.args)))
+    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+def FilterLines(output, filter_string):
+  """Output filter from build_utils.CheckOutput.
+
+  Args:
+    output: Executable output as from build_utils.CheckOutput.
+    filter_string: An RE string that will filter (remove) matching
+        lines from |output|.
+
+  Returns:
+    The filtered output, as a single string.
+  """
+  re_filter = re.compile(filter_string)
+  return '\n'.join(
+      line for line in output.split('\n') if not re_filter.search(line))
+
+
+def FilterReflectiveAccessJavaWarnings(output):
+  """Filters out warnings about illegal reflective access operation.
+
+  These warnings were introduced in Java 9, and generally mean that dependencies
+  need to be updated.
+  """
+  #  WARNING: An illegal reflective access operation has occurred
+  #  WARNING: Illegal reflective access by ...
+  #  WARNING: Please consider reporting this to the maintainers of ...
+  #  WARNING: Use --illegal-access=warn to enable warnings of further ...
+  #  WARNING: All illegal access operations will be denied in a future release
+  return FilterLines(
+      output, r'WARNING: ('
+      'An illegal reflective|'
+      'Illegal reflective access|'
+      'Please consider reporting this to|'
+      'Use --illegal-access=warn|'
+      'All illegal access operations)')
+
+
+# This can be used in most cases like subprocess.check_output(). The output,
+# particularly when the command fails, better highlights the command's failure.
+# If the command fails, raises a build_utils.CalledProcessError.
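+# Illustrative usage:
+#   output = CheckOutput(['gn', '--version'], fail_on_output=False)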
+def CheckOutput(args,
+                cwd=None,
+                env=None,
+                print_stdout=False,
+                print_stderr=True,
+                stdout_filter=None,
+                stderr_filter=None,
+                fail_on_output=True,
+                fail_func=lambda returncode, stderr: returncode != 0):
+  if not cwd:
+    cwd = os.getcwd()
+
+  child = subprocess.Popen(args,
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
+  stdout, stderr = child.communicate()
+
+  # For Python3 only:
+  if isinstance(stdout, bytes) and sys.version_info >= (3, ):
+    stdout = stdout.decode('utf-8')
+    stderr = stderr.decode('utf-8')
+
+  if stdout_filter is not None:
+    stdout = stdout_filter(stdout)
+
+  if stderr_filter is not None:
+    stderr = stderr_filter(stderr)
+
+  if fail_func and fail_func(child.returncode, stderr):
+    raise CalledProcessError(cwd, args, stdout + stderr)
+
+  if print_stdout:
+    sys.stdout.write(stdout)
+  if print_stderr:
+    sys.stderr.write(stderr)
+
+  has_stdout = print_stdout and stdout
+  has_stderr = print_stderr and stderr
+  if fail_on_output and (has_stdout or has_stderr):
+    MSG = """\
+Command failed because it wrote to {}.
+You can often set treat_warnings_as_errors=false to not treat output as \
+failure (useful when developing locally)."""
+    if has_stdout and has_stderr:
+      stream_string = 'stdout and stderr'
+    elif has_stdout:
+      stream_string = 'stdout'
+    else:
+      stream_string = 'stderr'
+    raise CalledProcessError(cwd, args, MSG.format(stream_string))
+
+  return stdout
+
+
+def GetModifiedTime(path):
+  # For a symlink, the modified time should be the greater of the link's
+  # modified time and the modified time of the target.
+  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+  if not os.path.exists(output):
+    return True
+
+  output_time = GetModifiedTime(output)
+  for i in inputs:
+    if GetModifiedTime(i) > output_time:
+      return True
+  return False
+
+
+def _CheckZipPath(name):
+  if os.path.normpath(name) != name:
+    raise Exception('Non-canonical zip path: %s' % name)
+  if os.path.isabs(name):
+    raise Exception('Absolute zip path: %s' % name)
+
+
+def _IsSymlink(zip_file, name):
+  zi = zip_file.getinfo(name)
+
+  # The two high-order bytes of ZipInfo.external_attr represent
+  # UNIX permissions and file type bits.
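+  # e.g. a symlink entry may carry 0o120777 (0xA1FF) in those bytes, for which
+  # stat.S_ISLNK() returns True.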
+  return stat.S_ISLNK(zi.external_attr >> 16)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
+               predicate=None):
+  if path is None:
+    path = os.getcwd()
+  elif not os.path.exists(path):
+    MakeDirectory(path)
+
+  if not zipfile.is_zipfile(zip_path):
+    raise Exception('Invalid zip file: %s' % zip_path)
+
+  extracted = []
+  with zipfile.ZipFile(zip_path) as z:
+    for name in z.namelist():
+      if name.endswith('/'):
+        MakeDirectory(os.path.join(path, name))
+        continue
+      if pattern is not None:
+        if not fnmatch.fnmatch(name, pattern):
+          continue
+      if predicate and not predicate(name):
+        continue
+      _CheckZipPath(name)
+      if no_clobber:
+        output_path = os.path.join(path, name)
+        if os.path.exists(output_path):
+          raise Exception(
+              'Path already exists from zip: %s %s %s'
+              % (zip_path, name, output_path))
+      if _IsSymlink(z, name):
+        dest = os.path.join(path, name)
+        MakeDirectory(os.path.dirname(dest))
+        os.symlink(z.read(name), dest)
+        extracted.append(dest)
+      else:
+        z.extract(name, path)
+        extracted.append(os.path.join(path, name))
+
+  return extracted
+
+
+def HermeticDateTime(timestamp=None):
+  """Returns a constant ZipInfo.date_time tuple.
+
+  Args:
+    timestamp: Unix timestamp to use for files in the archive.
+
+  Returns:
+    A ZipInfo.date_time tuple for Jan 1, 2001, or the given timestamp.
+  """
+  if not timestamp:
+    return (2001, 1, 1, 0, 0, 0)
+  utc_time = time.gmtime(timestamp)
+  return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour,
+          utc_time.tm_min, utc_time.tm_sec)
+
+
+def HermeticZipInfo(*args, **kwargs):
+  """Creates a zipfile.ZipInfo with a constant timestamp and external_attr.
+
+  If a date_time value is not provided in the positional or keyword arguments,
+  the default value from HermeticDateTime is used.
+
+  Args:
+    See zipfile.ZipInfo.
+
+  Returns:
+    A zipfile.ZipInfo.
+  """
+  # The caller may have provided a date_time either as a positional parameter
+  # (args[1]) or as a keyword parameter. Use the default hermetic date_time if
+  # none was provided.
+  date_time = None
+  if len(args) >= 2:
+    date_time = args[1]
+  elif 'date_time' in kwargs:
+    date_time = kwargs['date_time']
+  if not date_time:
+    kwargs['date_time'] = HermeticDateTime()
+  ret = zipfile.ZipInfo(*args, **kwargs)
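+  # 0o644 is a regular file with -rw-r--r-- permissions; zip stores UNIX mode
+  # bits in the two high-order bytes of external_attr, hence the << 16.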
+  ret.external_attr = (0o644 << 16)
+  return ret
+
+
+def AddToZipHermetic(zip_file,
+                     zip_path,
+                     src_path=None,
+                     data=None,
+                     compress=None,
+                     date_time=None):
+  """Adds a file to the given ZipFile with a hard-coded modified time.
+
+  Args:
+    zip_file: ZipFile instance to add the file to.
+    zip_path: Destination path within the zip file (or ZipInfo instance).
+    src_path: Path of the source file. Mutually exclusive with |data|.
+    data: File data as a string.
+    compress: Whether to enable compression. Default is taken from ZipFile
+        constructor.
+    date_time: The last modification date and time for the archive member.
+  """
+  assert (src_path is None) != (data is None), (
+      '|src_path| and |data| are mutually exclusive.')
+  if isinstance(zip_path, zipfile.ZipInfo):
+    zipinfo = zip_path
+    zip_path = zipinfo.filename
+  else:
+    zipinfo = HermeticZipInfo(filename=zip_path, date_time=date_time)
+
+  _CheckZipPath(zip_path)
+
+  if src_path and os.path.islink(src_path):
+    zipinfo.filename = zip_path
+    zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
+    zip_file.writestr(zipinfo, os.readlink(src_path))
+    return
+
+  # zipfile.write() does
+  #     external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16
+  # but we want to keep the hermetic attr set by HermeticZipInfo, so manually
+  # set the few attr bits we care about.
+  if src_path:
+    st = os.stat(src_path)
+    for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
+      if st.st_mode & mode:
+        zipinfo.external_attr |= mode << 16
+
+  if src_path:
+    with open(src_path, 'rb') as f:
+      data = f.read()
+
+  # zipfile will deflate even when it makes the file bigger. To avoid growing
+  # files, disable compression at an arbitrary cutoff point.
+  if len(data) < 16:
+    compress = False
+
+  # None converts to ZIP_STORED, when passed explicitly rather than the
+  # default passed to the ZipFile constructor.
+  compress_type = zip_file.compression
+  if compress is not None:
+    compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+  zip_file.writestr(zipinfo, data, compress_type)
+
+
+def DoZip(inputs,
+          output,
+          base_dir=None,
+          compress_fn=None,
+          zip_prefix_path=None,
+          timestamp=None):
+  """Creates a zip file from a list of files.
+
+  Args:
+    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+    output: Path, fileobj, or ZipFile instance to add files to.
+    base_dir: Prefix to strip from inputs.
+    compress_fn: Applied to each input to determine whether or not to compress.
+        By default, items will be |zipfile.ZIP_STORED|.
+    zip_prefix_path: Path prepended to file path in zip file.
+    timestamp: Unix timestamp to use for files in the archive.
+  """
+  if base_dir is None:
+    base_dir = '.'
+  input_tuples = []
+  for tup in inputs:
+    if isinstance(tup, string_types):
+      tup = (os.path.relpath(tup, base_dir), tup)
+      if tup[0].startswith('..'):
+        raise Exception('Invalid zip_path: ' + tup[0])
+    input_tuples.append(tup)
+
+  # Sort by zip path to ensure stable zip ordering.
+  input_tuples.sort(key=lambda tup: tup[0])
+
+  out_zip = output
+  if not isinstance(output, zipfile.ZipFile):
+    out_zip = zipfile.ZipFile(output, 'w')
+
+  date_time = HermeticDateTime(timestamp)
+  try:
+    for zip_path, fs_path in input_tuples:
+      if zip_prefix_path:
+        zip_path = os.path.join(zip_prefix_path, zip_path)
+      compress = compress_fn(zip_path) if compress_fn else None
+      AddToZipHermetic(out_zip,
+                       zip_path,
+                       src_path=fs_path,
+                       compress=compress,
+                       date_time=date_time)
+  finally:
+    if output is not out_zip:
+      out_zip.close()
+
+
+def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
+  """Creates a zip file from a directory."""
+  inputs = []
+  for root, _, files in os.walk(base_dir):
+    for f in files:
+      inputs.append(os.path.join(root, f))
+
+  if isinstance(output, zipfile.ZipFile):
+    DoZip(
+        inputs,
+        output,
+        base_dir,
+        compress_fn=compress_fn,
+        zip_prefix_path=zip_prefix_path)
+  else:
+    with AtomicOutput(output) as f:
+      DoZip(
+          inputs,
+          f,
+          base_dir,
+          compress_fn=compress_fn,
+          zip_prefix_path=zip_prefix_path)
+
+
+def MatchesGlob(path, filters):
+  """Returns whether the given path matches any of the given glob patterns."""
+  return filters and any(fnmatch.fnmatch(path, f) for f in filters)
+
+
+def MergeZips(output, input_zips, path_transform=None, compress=None):
+  """Combines all files from |input_zips| into |output|.
+
+  Args:
+    output: Path, fileobj, or ZipFile instance to add files to.
+    input_zips: Iterable of paths to zip files to merge.
+    path_transform: Called for each entry path. Returns a new path, or None to
+        skip the file.
+    compress: Overrides compression setting from origin zip entries.
+  """
+  path_transform = path_transform or (lambda p: p)
+  added_names = set()
+
+  out_zip = output
+  if not isinstance(output, zipfile.ZipFile):
+    out_zip = zipfile.ZipFile(output, 'w')
+
+  try:
+    for in_file in input_zips:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        for info in in_zip.infolist():
+          # Ignore directories.
+          if info.filename[-1] == '/':
+            continue
+          dst_name = path_transform(info.filename)
+          if not dst_name:
+            continue
+          already_added = dst_name in added_names
+          if not already_added:
+            if compress is not None:
+              compress_entry = compress
+            else:
+              compress_entry = info.compress_type != zipfile.ZIP_STORED
+            AddToZipHermetic(
+                out_zip,
+                dst_name,
+                data=in_zip.read(info),
+                compress=compress_entry)
+            added_names.add(dst_name)
+  finally:
+    if output is not out_zip:
+      out_zip.close()
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+  """Gets the list of all transitive dependencies in sorted order.
+
+  There should be no cycles in the dependency graph (crashes if cycles exist).
+
+  Args:
+    top: A list of the top level nodes
+    deps_func: A function that takes a node and returns a list of its direct
+        dependencies.
+  Returns:
+    A list of all transitive dependencies of nodes in top, in order (a node will
+    appear in the list at a higher index than all of its dependencies).
+  """
+  # Find all deps depth-first, maintaining original order in the case of ties.
+  deps_map = collections.OrderedDict()
+  def discover(nodes):
+    for node in nodes:
+      if node in deps_map:
+        continue
+      deps = deps_func(node)
+      discover(deps)
+      deps_map[node] = deps
+
+  discover(top)
+  return list(deps_map)
+
+
+def InitLogging(enabling_env):
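+  """Sets the log level to DEBUG when |enabling_env| is set, else WARNING.
+
+  Also logs process start and (non-forked) exit at INFO level.
+  """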
+  logging.basicConfig(
+      level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
+      format='%(levelname).1s %(process)d %(relativeCreated)6d %(message)s')
+  script_name = os.path.basename(sys.argv[0])
+  logging.info('Started (%s)', script_name)
+
+  my_pid = os.getpid()
+
+  def log_exit():
+    # Do not log for fork'ed processes.
+    if os.getpid() == my_pid:
+      logging.info("Job's done (%s)", script_name)
+
+  atexit.register(log_exit)
+
+
+def AddDepfileOption(parser):
+  # TODO(agrieve): Get rid of this once we've moved to argparse.
+  if hasattr(parser, 'add_option'):
+    func = parser.add_option
+  else:
+    func = parser.add_argument
+  func('--depfile',
+       help='Path to depfile (refer to `gn help depfile`)')
+
+
+def WriteDepfile(depfile_path, first_gn_output, inputs=None):
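+  """Writes a depfile in the Makefile syntax that ninja expects.
+
+  The output is a single rule of the form '<first_gn_output>: <inputs...>',
+  with spaces in paths escaped by backslashes.
+  """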
+  assert depfile_path != first_gn_output  # http://crbug.com/646165
+  assert not isinstance(inputs, string_types)  # Easy mistake to make
+  inputs = inputs or []
+  MakeDirectory(os.path.dirname(depfile_path))
+  # Ninja does not support multiple outputs in depfiles.
+  with open(depfile_path, 'w') as depfile:
+    depfile.write(first_gn_output.replace(' ', '\\ '))
+    depfile.write(': ')
+    depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
+    depfile.write('\n')
+
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json.
+  And then extracting the value at [key1][key2]...[keyn]. If a key has a '[]'
+  suffix the (intermediate) value will be interpreted as a single item list and
+  the single item will be returned or used for further traversal.
+
+  Note: This intentionally does not return the list of files that appear in such
+  placeholders. An action that uses file-args *must* know the paths of those
+  files prior to the parsing of the arguments (typically by explicitly listing
+  them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile(r'@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    def get_key(key):
+      if key.endswith('[]'):
+        return key[:-2], True
+      return key, False
+
+    lookup_path = match.group(1).split(':')
+    file_path, _ = get_key(lookup_path[0])
+    if file_path not in file_jsons:
+      with open(file_path) as f:
+        file_jsons[file_path] = json.load(f)
+
+    expansion = file_jsons
+    for k in lookup_path:
+      k, flatten = get_key(k)
+      expansion = expansion[k]
+      if flatten:
+        if not isinstance(expansion, list) or len(expansion) != 1:
+          raise Exception('Expected single item list but got %s' % expansion)
+        expansion = expansion[0]
+
+    # This should match ParseGnList. The output is either a GN-formatted list
+    # or a literal (with no quotes).
+    if isinstance(expansion, list):
+      new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
+                     arg[match.end():])
+    else:
+      new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]
+
+  return new_args
+
+
+def ReadSourcesList(sources_list_file_name):
+  """Reads a GN-written file containing list of file names and returns a list.
+
+  Note that this function should not be used to parse response files.
+  """
+  with open(sources_list_file_name) as f:
+    return [file_name.strip() for file_name in f]
diff --git a/src/build/android/gyp/util/build_utils_test.py b/src/build/android/gyp/util/build_utils_test.py
new file mode 100755
index 0000000..008ea11
--- /dev/null
+++ b/src/build/android/gyp/util/build_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+_DEPS = collections.OrderedDict()
+_DEPS['a'] = []
+_DEPS['b'] = []
+_DEPS['c'] = ['a']
+_DEPS['d'] = ['a']
+_DEPS['e'] = ['f']
+_DEPS['f'] = ['a', 'd']
+_DEPS['g'] = []
+_DEPS['h'] = ['d', 'b', 'f']
+_DEPS['i'] = ['f']
+
+
+class BuildUtilsTest(unittest.TestCase):
+  def testGetSortedTransitiveDependencies_all(self):
+    TOP = _DEPS.keys()
+    EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i']
+    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+    self.assertEqual(EXPECTED, actual)
+
+  def testGetSortedTransitiveDependencies_leaves(self):
+    TOP = ['c', 'e', 'g', 'h', 'i']
+    EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i']
+    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+    self.assertEqual(EXPECTED, actual)
+
+  def testGetSortedTransitiveDependencies_leavesReverse(self):
+    TOP = ['i', 'h', 'g', 'e', 'c']
+    EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c']
+    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+    self.assertEqual(EXPECTED, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/diff_utils.py b/src/build/android/gyp/util/diff_utils.py
new file mode 100644
index 0000000..530a688
--- /dev/null
+++ b/src/build/android/gyp/util/diff_utils.py
@@ -0,0 +1,127 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+import difflib
+from util import build_utils
+
+
+def _SkipOmitted(line):
+  """
+  Skip lines that are to be intentionally omitted from the expectations file.
+
+  This is required when the file to be compared against expectations contains
+  a line that changes from build to build because - for instance - it contains
+  version information.
+  """
+  if line.rstrip().endswith('# OMIT FROM EXPECTATIONS'):
+    return '# THIS LINE WAS OMITTED\n'
+  return line
+
+
+def _GenerateDiffWithOnlyAdditions(expected_path, actual_data):
+  """Generates a diff that contains only additions."""
+  # Ignore blank lines when creating the diff to cut down on whitespace-only
+  # lines in the diff. Also remove trailing whitespaces and add the new lines
+  # manually (ndiff expects new lines but we don't care about trailing
+  # whitespace).
+  with open(expected_path) as expected:
+    expected_lines = [l for l in expected.readlines() if l.strip()]
+  actual_lines = [
+      '{}\n'.format(l.rstrip()) for l in actual_data.splitlines() if l.strip()
+  ]
+
+  diff = difflib.ndiff(expected_lines, actual_lines)
+  filtered_diff = (l for l in diff if l.startswith('+'))
+  return ''.join(filtered_diff)
+
+
+def _DiffFileContents(expected_path, actual_data):
+  """Check file contents for equality and return the diff or None."""
+  # Remove all trailing whitespace and add it explicitly in the end.
+  with open(expected_path) as f_expected:
+    expected_lines = [l.rstrip() for l in f_expected.readlines()]
+  actual_lines = [
+      _SkipOmitted(line).rstrip() for line in actual_data.splitlines()
+  ]
+
+  if expected_lines == actual_lines:
+    return None
+
+  expected_path = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT)
+
+  diff = difflib.unified_diff(
+      expected_lines,
+      actual_lines,
+      fromfile=os.path.join('before', expected_path),
+      tofile=os.path.join('after', expected_path),
+      n=0,
+      lineterm='',
+  )
+
+  return '\n'.join(diff)
+
+
+def AddCommandLineFlags(parser):
+  group = parser.add_argument_group('Expectations')
+  group.add_argument(
+      '--expected-file',
+      help='Expected contents for the check. If --expected-file-base is set, '
+      'this is a diff of --actual-file and --expected-file-base.')
+  group.add_argument(
+      '--expected-file-base',
+      help='File to diff against before comparing to --expected-file.')
+  group.add_argument('--actual-file',
+                     help='Path to write actual file (for reference).')
+  group.add_argument('--failure-file',
+                     help='Write to this file if expectations fail.')
+  group.add_argument('--fail-on-expectations',
+                     action="store_true",
+                     help='Fail on expectation mismatches.')
+  group.add_argument('--only-verify-expectations',
+                     action='store_true',
+                     help='Verify the expectation and exit.')
+
+
+def CheckExpectations(actual_data, options, custom_msg=''):
+  if options.actual_file:
+    with build_utils.AtomicOutput(options.actual_file) as f:
+      f.write(actual_data.encode('utf8'))
+  if options.expected_file_base:
+    actual_data = _GenerateDiffWithOnlyAdditions(options.expected_file_base,
+                                                 actual_data)
+  diff_text = _DiffFileContents(options.expected_file, actual_data)
+
+  if not diff_text:
+    fail_msg = ''
+  else:
+    fail_msg = """
+Expectations need updating:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/expectations/README.md
+
+LogDog tip: Use "Raw log" or "Switch to lite mode" before copying:
+https://bugs.chromium.org/p/chromium/issues/detail?id=984616
+
+{}
+
+To update expectations, run:
+########### START ###########
+ patch -p1 <<'END_DIFF'
+{}
+END_DIFF
+############ END ############
+""".format(custom_msg, diff_text)
+
+    sys.stderr.write(fail_msg)
+
+  if fail_msg and options.fail_on_expectations:
+    # Don't write failure file when failing on expectations or else the target
+    # will not be re-run on subsequent ninja invocations.
+    sys.exit(1)
+
+  if options.failure_file:
+    with open(options.failure_file, 'w') as f:
+      f.write(fail_msg)
diff --git a/src/build/android/gyp/util/jar_info_utils.py b/src/build/android/gyp/util/jar_info_utils.py
new file mode 100644
index 0000000..9759455
--- /dev/null
+++ b/src/build/android/gyp/util/jar_info_utils.py
@@ -0,0 +1,59 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+# Utilities to read and write .jar.info files.
+#
+# A .jar.info file contains a simple mapping from fully-qualified Java class
+# names to the source file that actually defines it.
+#
+# For APKs, the .jar.info instead maps each class name to the .jar file that
+# contains its .class definition.
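+#
+# Each line has the form '<fully-qualified class name>,<path>', e.g.:
+#   org.chromium.base.Foo,base/android/java/src/org/chromium/base/Foo.java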
+
+
+def ReadAarSourceInfo(info_path):
+  """Returns the source= path from an .aar's source.info file."""
+  # The .info looks like: "source=path/to/.aar\n".
+  with open(info_path) as f:
+    return f.read().rstrip().split('=', 1)[1]
+
+
+def ParseJarInfoFile(info_path):
+  """Parse a given .jar.info file as a dictionary.
+
+  Args:
+    info_path: input .jar.info file path.
+  Returns:
+    A new dictionary mapping fully-qualified Java class names to file paths.
+  """
+  info_data = dict()
+  if os.path.exists(info_path):
+    with open(info_path, 'r') as info_file:
+      for line in info_file:
+        line = line.strip()
+        if line:
+          fully_qualified_name, path = line.split(',', 1)
+          info_data[fully_qualified_name] = path
+  return info_data
+
+
+def WriteJarInfoFile(output_obj, info_data, source_file_map=None):
+  """Generate a .jar.info file from a given dictionary.
+
+  Args:
+    output_obj: output file object.
+    info_data: a mapping of fully qualified Java class names to filepaths.
+    source_file_map: an optional mapping from java source file paths to the
+      corresponding source .srcjar. This is needed because info_data may
+      contain paths of Java source files that were extracted from a .srcjar
+      into a temporary location.
+  """
+  for fully_qualified_name, path in sorted(info_data.items()):
+    if source_file_map and path in source_file_map:
+      path = source_file_map[path]
+      assert not path.startswith('/tmp'), (
+          'Java file path should not be in temp dir: {}'.format(path))
+    output_obj.write(('{},{}\n'.format(fully_qualified_name,
+                                       path)).encode('utf8'))
diff --git a/src/build/android/gyp/util/java_cpp_utils.py b/src/build/android/gyp/util/java_cpp_utils.py
new file mode 100644
index 0000000..5180400
--- /dev/null
+++ b/src/build/android/gyp/util/java_cpp_utils.py
@@ -0,0 +1,194 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+
+def GetScriptName():
+  return os.path.basename(os.path.abspath(sys.argv[0]))
+
+
+def GetJavaFilePath(java_package, class_name):
+  package_path = java_package.replace('.', os.path.sep)
+  file_name = class_name + '.java'
+  return os.path.join(package_path, file_name)
+
+
+def KCamelToShouty(s):
+  """Convert |s| from kCamelCase or CamelCase to SHOUTY_CASE.
+
+  kFooBar -> FOO_BAR
+  FooBar -> FOO_BAR
+  FooBAR9 -> FOO_BAR9
+  FooBARBaz -> FOO_BAR_BAZ
+  """
+  if not re.match(r'^k?([A-Z][^A-Z]+|[A-Z0-9]+)+$', s):
+    return s
+  # Strip the leading k.
+  s = re.sub(r'^k', '', s)
+  # Treat "WebView" like one word.
+  s = re.sub(r'WebView', r'Webview', s)
+  # Add _ between title words and anything else.
+  s = re.sub(r'([^_])([A-Z][^A-Z_0-9]+)', r'\1_\2', s)
+  # Add _ between lower -> upper transitions.
+  s = re.sub(r'([^A-Z_0-9])([A-Z])', r'\1_\2', s)
+  return s.upper()
+
+
+class JavaString(object):
+  def __init__(self, name, value, comments):
+    self.name = KCamelToShouty(name)
+    self.value = value
+    self.comments = '\n'.join('    ' + x for x in comments)
+
+  def Format(self):
+    return '%s\n    public static final String %s = %s;' % (
+        self.comments, self.name, self.value)
+
+
+def ParseTemplateFile(lines):
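+  """Returns (package, class_name) parsed from Java template file lines.
+
+  E.g. the lines 'package org.foo;' and 'public class Bar {' yield
+  ('org.foo', 'Bar').
+  """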
+  package_re = re.compile(r'^package (.*);')
+  class_re = re.compile(r'.*class (.*) {')
+  package = ''
+  class_name = ''
+  for line in lines:
+    package_line = package_re.match(line)
+    if package_line:
+      package = package_line.groups()[0]
+    class_line = class_re.match(line)
+    if class_line:
+      class_name = class_line.groups()[0]
+      break
+  return package, class_name
+
+
+# TODO(crbug.com/937282): Work will be needed if we want to annotate specific
+# constants in the file to be parsed.
+class CppConstantParser(object):
+  """Parses C++ constants, retaining their comments.
+
+  The Delegate subclass is responsible for matching and extracting the
+  constant's variable name and value, as well as generating an object to
+  represent the Java representation of this value.
+  """
+  SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)')
+
+  class Delegate(object):
+    def ExtractConstantName(self, line):
+      """Extracts a constant's name from line or None if not a match."""
+      raise NotImplementedError()
+
+    def ExtractValue(self, line):
+      """Extracts a constant's value from line or None if not a match."""
+      raise NotImplementedError()
+
+    def CreateJavaConstant(self, name, value, comments):
+      """Creates an object representing the Java analog of a C++ constant.
+
+      CppConstantParser will not interact with the object created by this
+      method. Instead, it will store this value in a list and return a list of
+      all objects from the Parse() method. In this way, the caller may define
+      whatever class suits their need.
+
+      Args:
+        name: the constant's variable name, as extracted by
+          ExtractConstantName()
+        value: the constant's value, as extracted by ExtractValue()
+        comments: the code comments describing this constant
+      """
+      raise NotImplementedError()
+
+  def __init__(self, delegate, lines):
+    self._delegate = delegate
+    self._lines = lines
+    self._in_variable = False
+    self._in_comment = False
+    self._package = ''
+    self._current_comments = []
+    self._current_name = ''
+    self._current_value = ''
+    self._constants = []
+
+  def _ExtractVariable(self, line):
+    # Extraction is delegated so that Delegate subclasses control matching.
+    return self._delegate.ExtractConstantName(line)
+
+  def _ExtractValue(self, line):
+    return self._delegate.ExtractValue(line)
+
+  def _Reset(self):
+    self._current_comments = []
+    self._current_name = ''
+    self._current_value = ''
+    self._in_variable = False
+    self._in_comment = False
+
+  def _AppendConstant(self):
+    self._constants.append(
+        self._delegate.CreateJavaConstant(self._current_name,
+                                          self._current_value,
+                                          self._current_comments))
+    self._Reset()
+
+  def _ParseValue(self, line):
+    current_value = self._delegate.ExtractValue(line)
+    if current_value is not None:
+      self._current_value = current_value
+      self._AppendConstant()
+    else:
+      self._Reset()
+
+  def _ParseComment(self, line):
+    comment_line = CppConstantParser.SINGLE_LINE_COMMENT_RE.match(line)
+    if comment_line:
+      self._current_comments.append(comment_line.groups()[0])
+      self._in_comment = True
+      self._in_variable = True
+      return True
+    else:
+      self._in_comment = False
+      return False
+
+  def _ParseVariable(self, line):
+    current_name = self._delegate.ExtractConstantName(line)
+    if current_name is not None:
+      self._current_name = current_name
+      current_value = self._delegate.ExtractValue(line)
+      if current_value is not None:
+        self._current_value = current_value
+        self._AppendConstant()
+      else:
+        self._in_variable = True
+      return True
+    else:
+      self._in_variable = False
+      return False
+
+  def _ParseLine(self, line):
+    if not self._in_variable:
+      if not self._ParseVariable(line):
+        self._ParseComment(line)
+      return
+
+    if self._in_comment:
+      if self._ParseComment(line):
+        return
+      if not self._ParseVariable(line):
+        self._Reset()
+      return
+
+    if self._in_variable:
+      self._ParseValue(line)
+
+  def Parse(self):
+    """Returns a list of objects representing C++ constants.
+
+    Each object in the list was created by Delegate.CreateJavaConstant().
+    """
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._constants
diff --git a/src/build/android/gyp/util/manifest_utils.py b/src/build/android/gyp/util/manifest_utils.py
new file mode 100644
index 0000000..a517708
--- /dev/null
+++ b/src/build/android/gyp/util/manifest_utils.py
@@ -0,0 +1,321 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for working with Android manifests."""
+
+import hashlib
+import os
+import re
+import shlex
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+from xml.etree import ElementTree
+
+ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+DIST_NAMESPACE = 'http://schemas.android.com/apk/distribution'
+EMPTY_ANDROID_MANIFEST_PATH = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', 'AndroidManifest.xml'))
+# When normalizing for expectation matching, wrap these tags when they are long
+# or else they become very hard to read.
+_WRAP_CANDIDATES = (
+    '<manifest',
+    '<application',
+    '<activity',
+    '<provider',
+    '<receiver',
+    '<service',
+)
+# Don't wrap lines shorter than this.
+_WRAP_LINE_LENGTH = 100
+
+_xml_namespace_initialized = False
+
+
+def _RegisterElementTreeNamespaces():
+  global _xml_namespace_initialized
+  if _xml_namespace_initialized:
+    return
+  _xml_namespace_initialized = True
+  ElementTree.register_namespace('android', ANDROID_NAMESPACE)
+  ElementTree.register_namespace('tools', TOOLS_NAMESPACE)
+  ElementTree.register_namespace('dist', DIST_NAMESPACE)
+
+
+def ParseManifest(path):
+  """Parses an AndroidManifest.xml using ElementTree.
+
+  Registers required namespaces, creates application node if missing, adds any
+  missing namespaces for 'android', 'tools' and 'dist'.
+
+  Returns tuple of:
+    doc: Root xml document.
+    manifest_node: the <manifest> node.
+    app_node: the <application> node.
+  """
+  _RegisterElementTreeNamespaces()
+  doc = ElementTree.parse(path)
+  # ElementTree.find does not work if the required tag is the root.
+  if doc.getroot().tag == 'manifest':
+    manifest_node = doc.getroot()
+  else:
+    manifest_node = doc.find('manifest')
+
+  app_node = doc.find('application')
+  if app_node is None:
+    app_node = ElementTree.SubElement(manifest_node, 'application')
+
+  return doc, manifest_node, app_node
+
+
+def SaveManifest(doc, path):
+  with build_utils.AtomicOutput(path) as f:
+    f.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8'))
+
+
+def GetPackage(manifest_node):
+  return manifest_node.get('package')
+
+
+def AssertUsesSdk(manifest_node,
+                  min_sdk_version=None,
+                  target_sdk_version=None,
+                  max_sdk_version=None,
+                  fail_if_not_exist=False):
+  """Asserts values of attributes of <uses-sdk> element.
+
+  Unless |fail_if_not_exist| is true, will only assert if both the passed value
+  is not None and the value of attribute exist. If |fail_if_not_exist| is true
+  will fail if passed value is not None but attribute does not exist.
+  """
+  uses_sdk_node = manifest_node.find('./uses-sdk')
+  if uses_sdk_node is None:
+    return
+  for prefix, sdk_version in (('min', min_sdk_version),
+                              ('target', target_sdk_version),
+                              ('max', max_sdk_version)):
+    value = uses_sdk_node.get('{%s}%sSdkVersion' % (ANDROID_NAMESPACE, prefix))
+    if fail_if_not_exist and not value and sdk_version:
+      assert False, (
+          '%sSdkVersion in Android manifest does not exist but we expect %s' %
+          (prefix, sdk_version))
+    if not value or not sdk_version:
+      continue
+    assert value == sdk_version, (
+        '%sSdkVersion in Android manifest is %s but we expect %s' %
+        (prefix, value, sdk_version))
+
+
+def AssertPackage(manifest_node, package):
+  """Asserts that manifest package has desired value.
+
+  Will only assert if both |package| is not None and the package is set in the
+  manifest.
+  """
+  package_value = GetPackage(manifest_node)
+  if package_value is None or package is None:
+    return
+  assert package_value == package, (
+      'Package in Android manifest is %s but we expect %s' % (package_value,
+                                                              package))
+
+
+def _SortAndStripElementTree(root):
+  # Sort alphabetically with two exceptions:
+  # 1) Put <application> node last (since it's giant).
+  # 2) Put android:name before other attributes.
+  def element_sort_key(node):
+    if node.tag == 'application':
+      return 'z'
+    ret = ElementTree.tostring(node)
+    # ElementTree.tostring inserts namespace attributes for any that are needed
+    # for the node or any of its descendants. Remove them so as to prevent a
+    # change to a child that adds/removes a namespace usage from changing sort
+    # order.
+    return re.sub(r' xmlns:.*?".*?"', '', ret.decode('utf8'))
+
+  name_attr = '{%s}name' % ANDROID_NAMESPACE
+
+  def attribute_sort_key(tup):
+    return ('', '') if tup[0] == name_attr else tup
+
+  def helper(node):
+    for child in node:
+      if child.text and child.text.isspace():
+        child.text = None
+      helper(child)
+
+    # Sort attributes (requires Python 3.8+).
+    node.attrib = dict(sorted(node.attrib.items(), key=attribute_sort_key))
+
+    # Sort nodes
+    node[:] = sorted(node, key=element_sort_key)
+
+  helper(root)
+
+
+def _SplitElement(line):
+  """Parses a one-line xml node into ('<tag', ['a="b"', ...]], '/>')."""
+
+  # Shlex splits nicely, but removes quotes. Need to put them back.
+  def restore_quotes(value):
+    return value.replace('=', '="', 1) + '"'
+
+  # Simplify restore_quotes by separating />.
+  assert line.endswith('>'), line
+  end_tag = '>'
+  if line.endswith('/>'):
+    end_tag = '/>'
+  line = line[:-len(end_tag)]
+
+  # Use shlex to avoid having to re-encode &quot;, etc.
+  parts = shlex.split(line)
+  start_tag = parts[0]
+  attrs = parts[1:]
+
+  return start_tag, [restore_quotes(x) for x in attrs], end_tag
+
+
+def _CreateNodeHash(lines):
+  """Computes a hash (md5) for the first XML node found in |lines|.
+
+  Args:
+    lines: List of strings containing pretty-printed XML.
+
+  Returns:
+    The first eight hex digits of the md5 of the node (including children).
+  """
+  target_indent = lines[0].find('<')
+  tag_closed = False
+  for i, l in enumerate(lines[1:]):
+    cur_indent = l.find('<')
+    if cur_indent != -1 and cur_indent <= target_indent:
+      tag_lines = lines[:i + 1]
+      break
+    elif not tag_closed and 'android:name="' in l:
+      # To reduce noise from node tags changing, use android:name as the
+      # basis of the hash, since names are usually unique.
+      tag_lines = [l]
+      break
+    tag_closed = tag_closed or '>' in l
+  else:
+    assert False, 'Did not find end of node:\n' + '\n'.join(lines)
+
+  # An insecure, truncated hash is fine here: it only needs to be unique
+  # relative to its neighbors.
+  return hashlib.md5(('\n'.join(tag_lines)).encode('utf8')).hexdigest()[:8]
+
+
+def _IsSelfClosing(lines):
+  """Given pretty-printed xml, returns whether first node is self-closing."""
+  for l in lines:
+    idx = l.find('>')
+    if idx != -1:
+      return l[idx - 1] == '/'
+  assert False, 'Did not find end of tag:\n' + '\n'.join(lines)
+
+
+def _AddDiffTags(lines):
+  # When multiple identical tags appear sequentially, XML diffs can look like:
+  # +  </tag>
+  # +  <tag>
+  # rather than:
+  # +  <tag>
+  # +  </tag>
+  # To reduce confusion, add hashes to tags. This also ensures changed tags
+  # show up with outer <tag> elements rather than with only changed
+  # attributes.
+  hash_stack = []
+  for i, l in enumerate(lines):
+    stripped = l.lstrip()
+    # Ignore non-indented tags and lines that are not the start/end of a node.
+    if l[0] != ' ' or stripped[0] != '<':
+      continue
+    # Ignore self-closing nodes that fit on one line.
+    if l[-2:] == '/>':
+      continue
+    # Ignore <application> since diff tag changes with basically any change.
+    if stripped.lstrip('</').startswith('application'):
+      continue
+
+    # Check for the closing tag (</foo>).
+    if stripped[1] != '/':
+      cur_hash = _CreateNodeHash(lines[i:])
+      if not _IsSelfClosing(lines[i:]):
+        hash_stack.append(cur_hash)
+    else:
+      cur_hash = hash_stack.pop()
+    lines[i] += '  # DIFF-ANCHOR: {}'.format(cur_hash)
+  assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack)
+
+
+def NormalizeManifest(manifest_contents):
+  _RegisterElementTreeNamespaces()
+  # This also strips comments and sorts node attributes alphabetically.
+  root = ElementTree.fromstring(manifest_contents)
+  package = GetPackage(root)
+
+  app_node = root.find('application')
+  if app_node is not None:
+    # android:debuggable is added when !is_official_build. Strip it out to avoid
+    # expectation diffs caused by not adding is_official_build. Play store
+    # blocks uploading apps with it set, so there's no risk of it slipping in.
+    debuggable_name = '{%s}debuggable' % ANDROID_NAMESPACE
+    if debuggable_name in app_node.attrib:
+      del app_node.attrib[debuggable_name]
+
+    # Trichrome's static library version number is updated daily. To avoid
+    # frequent manifest check failures, we remove the exact version number
+    # during normalization.
+    for node in app_node:
+      if (node.tag in ['uses-static-library', 'static-library']
+          and '{%s}version' % ANDROID_NAMESPACE in node.keys()
+          and '{%s}name' % ANDROID_NAMESPACE in node.keys()):
+        node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER')
+
+  # We also remove the exact package name (except the one at the root level)
+  # to avoid noise during manifest comparison.
+  def blur_package_name(node):
+    for key in node.keys():
+      node.set(key, node.get(key).replace(package, '$PACKAGE'))
+
+    for child in node:
+      blur_package_name(child)
+
+  # Only blur the package names of non-root nodes, since they generate a lot
+  # of diffs when doing manifest checks for upstream targets. Keeping the root
+  # node's package name un-blurred guards against the package name being
+  # mistakenly changed.
+  for child in root:
+    blur_package_name(child)
+
+  _SortAndStripElementTree(root)
+
+  # Fix up whitespace/indentation.
+  dom = minidom.parseString(ElementTree.tostring(root))
+  out_lines = []
+  for l in dom.toprettyxml(indent='  ').splitlines():
+    if not l or l.isspace():
+      continue
+    if len(l) > _WRAP_LINE_LENGTH and any(x in l for x in _WRAP_CANDIDATES):
+      indent = ' ' * l.find('<')
+      start_tag, attrs, end_tag = _SplitElement(l)
+      out_lines.append('{}{}'.format(indent, start_tag))
+      for attribute in attrs:
+        out_lines.append('{}    {}'.format(indent, attribute))
+      out_lines[-1] += '>'
+      # Heuristic: Do not allow multi-line tags to be self-closing since these
+      # can generally be allowed to have nested elements. When diffing, it adds
+      # noise if the base file is self-closing and the non-base file is not
+      # self-closing.
+      if end_tag == '/>':
+        out_lines.append('{}{}>'.format(indent, start_tag.replace('<', '</')))
+    else:
+      out_lines.append(l)
+
+  # Make output more diff-friendly.
+  _AddDiffTags(out_lines)
+
+  return '\n'.join(out_lines) + '\n'
diff --git a/src/build/android/gyp/util/manifest_utils_test.py b/src/build/android/gyp/util/manifest_utils_test.py
new file mode 100755
index 0000000..52bf458
--- /dev/null
+++ b/src/build/android/gyp/util/manifest_utils_test.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..'))
+from util import manifest_utils
+
+_TEST_MANIFEST = """\
+<?xml version="1.0" ?>
+<manifest package="test.pkg"
+    tools:ignore="MissingVersion"
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools">
+  <!-- Should be one line. -->
+  <uses-sdk android:minSdkVersion="24"
+      android:targetSdkVersion="30"/>
+  <!-- Should have attrs sorted-->
+  <uses-feature android:required="false" android:version="1"
+    android:name="android.hardware.vr.headtracking" />
+  <!-- Should not be wrapped since < 100 chars. -->
+  <application
+      android:name="testname">
+    <activity
+        {extra_activity_attr}
+        android:icon="@drawable/ic_devices_48dp"
+        android:label="label with spaces"
+        android:name="to be hashed"
+        android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+      <intent-filter>
+        {extra_intent_filter_elem}
+        <action android:name="android.intent.action.SEND"/>
+        <category android:name="android.intent.category.DEFAULT"/>
+        <data android:mimeType="text/plain"/>
+      </intent-filter>
+    </activity>
+    <!-- Should be made non-self-closing. -->
+    <receiver android:exported="false" android:name="\
+org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"/>
+  </application>
+</manifest>
+"""
+
+_TEST_MANIFEST_NORMALIZED = """\
+<?xml version="1.0" ?>
+<manifest
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    package="test.pkg"
+    tools:ignore="MissingVersion">
+  <uses-feature android:name="android.hardware.vr.headtracking" \
+android:required="false" android:version="1"/>
+  <uses-sdk android:minSdkVersion="24" android:targetSdkVersion="30"/>
+  <application android:name="testname">
+    <activity  # DIFF-ANCHOR: {activity_diff_anchor}
+        android:name="to be hashed"
+        {extra_activity_attr}android:icon="@drawable/ic_devices_48dp"
+        android:label="label with spaces"
+        android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+      <intent-filter>  # DIFF-ANCHOR: {intent_filter_diff_anchor}
+        {extra_intent_filter_elem}\
+<action android:name="android.intent.action.SEND"/>
+        <category android:name="android.intent.category.DEFAULT"/>
+        <data android:mimeType="text/plain"/>
+      </intent-filter>  # DIFF-ANCHOR: {intent_filter_diff_anchor}
+    </activity>  # DIFF-ANCHOR: {activity_diff_anchor}
+    <receiver  # DIFF-ANCHOR: ddab3320
+        android:name=\
+"org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"
+        android:exported="false">
+    </receiver>  # DIFF-ANCHOR: ddab3320
+  </application>
+</manifest>
+"""
+
+_ACTIVITY_DIFF_ANCHOR = '32b3a641'
+_INTENT_FILTER_DIFF_ANCHOR = '4ee601b7'
+
+
+def _CreateTestData(intent_filter_diff_anchor=_INTENT_FILTER_DIFF_ANCHOR,
+                    extra_activity_attr='',
+                    extra_intent_filter_elem=''):
+  if extra_activity_attr:
+    extra_activity_attr += '\n        '
+  if extra_intent_filter_elem:
+    extra_intent_filter_elem += '\n        '
+  test_manifest = _TEST_MANIFEST.format(
+      extra_activity_attr=extra_activity_attr,
+      extra_intent_filter_elem=extra_intent_filter_elem)
+  expected = _TEST_MANIFEST_NORMALIZED.format(
+      activity_diff_anchor=_ACTIVITY_DIFF_ANCHOR,
+      intent_filter_diff_anchor=intent_filter_diff_anchor,
+      extra_activity_attr=extra_activity_attr,
+      extra_intent_filter_elem=extra_intent_filter_elem)
+  return test_manifest, expected
+
+
+class ManifestUtilsTest(unittest.TestCase):
+  # Enable diff output.
+  maxDiff = None
+
+  def testNormalizeManifest_golden(self):
+    test_manifest, expected = _CreateTestData()
+    actual = manifest_utils.NormalizeManifest(test_manifest)
+    self.assertMultiLineEqual(expected, actual)
+
+  def testNormalizeManifest_nameUsedForActivity(self):
+    test_manifest, expected = _CreateTestData(extra_activity_attr='a="b"')
+    actual = manifest_utils.NormalizeManifest(test_manifest)
+    # Checks that the DIFF-ANCHOR does not change with the added attribute.
+    self.assertMultiLineEqual(expected, actual)
+
+  def testNormalizeManifest_nameNotUsedForIntentFilter(self):
+    test_manifest, expected = _CreateTestData(
+        extra_intent_filter_elem='<a/>', intent_filter_diff_anchor='5f5c8a70')
+    actual = manifest_utils.NormalizeManifest(test_manifest)
+    # Checks that the DIFF-ANCHOR does change with the added element despite
+    # having a nested element with an android:name set.
+    self.assertMultiLineEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/md5_check.py b/src/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000..87ee723
--- /dev/null
+++ b/src/build/android/gyp/util/md5_check.py
@@ -0,0 +1,471 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import print_python_deps
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
+def CallAndWriteDepfileIfStale(on_stale_md5,
+                               options,
+                               record_path=None,
+                               input_paths=None,
+                               input_strings=None,
+                               output_paths=None,
+                               force=False,
+                               pass_changes=False,
+                               track_subpaths_allowlist=None,
+                               depfile_deps=None):
+  """Wraps CallAndRecordIfStale() and writes a depfile if applicable.
+
+  Depfiles are automatically added to output_paths when present in the |options|
+  argument. They are then created after |on_stale_md5| is called.
+
+  By default, only python dependencies are added to the depfile. If there are
+  other input paths that are not captured by GN deps, then they should be listed
+  in depfile_deps. It's important to write paths to the depfile that are already
+  captured by GN deps since GN args can cause GN deps to change, and such
+  changes are not immediately reflected in depfiles (http://crbug.com/589311).
+  """
+  if not output_paths:
+    raise Exception('At least one output_path must be specified.')
+  input_paths = list(input_paths or [])
+  input_strings = list(input_strings or [])
+  output_paths = list(output_paths or [])
+
+  input_paths += print_python_deps.ComputePythonDependencies()
+
+  CallAndRecordIfStale(
+      on_stale_md5,
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      force=force,
+      pass_changes=pass_changes,
+      track_subpaths_allowlist=track_subpaths_allowlist)
+
+  # Write depfile even when inputs have not changed to ensure build correctness
+  # on bots that build with & without patch, and the patch changes the depfile
+  # location.
+  if hasattr(options, 'depfile') and options.depfile:
+    build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
+
+
+def CallAndRecordIfStale(function,
+                         record_path=None,
+                         input_paths=None,
+                         input_strings=None,
+                         output_paths=None,
+                         force=False,
+                         pass_changes=False,
+                         track_subpaths_allowlist=None):
+  """Calls function if outputs are stale.
+
+  Outputs are considered stale if:
+  - any output_paths are missing, or
+  - the contents of any file within input_paths has changed, or
+  - the contents of input_strings has changed.
+
+  To debug which files are out-of-date, set the environment variable:
+      PRINT_BUILD_EXPLANATIONS=1
+
+  Args:
+    function: The function to call.
+    record_path: Path to record metadata.
+      Defaults to output_paths[0] + '.md5.stamp'
+    input_paths: List of paths to calculate an md5 sum on.
+    input_strings: List of strings to record verbatim.
+    output_paths: List of output paths.
+    force: Whether to treat outputs as missing regardless of whether they
+      actually are.
+    pass_changes: Whether to pass a Changes instance to |function|.
+    track_subpaths_allowlist: Relevant only when pass_changes=True. List of .zip
+      files from |input_paths| to make subpath information available for.
+  """
+  assert record_path or output_paths
+  input_paths = input_paths or []
+  input_strings = input_strings or []
+  output_paths = output_paths or []
+  record_path = record_path or output_paths[0] + '.md5.stamp'
+
+  assert record_path.endswith('.stamp'), (
+      'record paths must end in \'.stamp\' so that they are easy to find '
+      'and delete')
+
+  new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
+  new_metadata.AddStrings(input_strings)
+
+  zip_allowlist = set(track_subpaths_allowlist or [])
+  for path in input_paths:
+    # It's faster to md5 an entire zip file than it is to just locate & hash
+    # its central directory (which is what this used to do).
+    if path in zip_allowlist:
+      entries = _ExtractZipEntries(path)
+      new_metadata.AddZipFile(path, entries)
+    else:
+      new_metadata.AddFile(path, _ComputeTagForPath(path))
+
+  old_metadata = None
+  force = force or _FORCE_REBUILD
+  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+  too_new = []
+  # When outputs are missing, don't bother gathering change information.
+  if not missing_outputs and os.path.exists(record_path):
+    record_mtime = os.path.getmtime(record_path)
+    # Outputs newer than the change information must have been modified outside
+    # of the build, and should be considered stale.
+    too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
+    if not too_new:
+      with open(record_path, 'r') as jsonfile:
+        try:
+          old_metadata = _Metadata.FromFile(jsonfile)
+        except:  # pylint: disable=bare-except
+          pass  # Not yet using new file format.
+
+  changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new)
+  if not changes.HasChanges():
+    return
+
+  if PRINT_EXPLANATIONS:
+    print('=' * 80)
+    print('Target is stale: %s' % record_path)
+    print(changes.DescribeDifference())
+    print('=' * 80)
+
+  args = (changes,) if pass_changes else ()
+  function(*args)
+
+  with open(record_path, 'w') as f:
+    new_metadata.ToFile(f)
+
+
+class Changes(object):
+  """Provides and API for querying what changed between runs."""
+
+  def __init__(self, old_metadata, new_metadata, force, missing_outputs,
+               too_new):
+    self.old_metadata = old_metadata
+    self.new_metadata = new_metadata
+    self.force = force
+    self.missing_outputs = missing_outputs
+    self.too_new = too_new
+
+  def _GetOldTag(self, path, subpath=None):
+    return self.old_metadata and self.old_metadata.GetTag(path, subpath)
+
+  def HasChanges(self):
+    """Returns whether any changes exist."""
+    return (self.HasStringChanges()
+            or self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+  def HasStringChanges(self):
+    """Returns whether string metadata changed."""
+    return (self.force or not self.old_metadata
+            or self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5())
+
+  def AddedOrModifiedOnly(self):
+    """Returns whether the only changes were from added or modified (sub)files.
+
+    No missing outputs, no removed paths/subpaths.
+    """
+    if self.HasStringChanges():
+      return False
+    if any(self.IterRemovedPaths()):
+      return False
+    for path in self.IterModifiedPaths():
+      if any(self.IterRemovedSubpaths(path)):
+        return False
+    return True
+
+  def IterAllPaths(self):
+    """Generator for paths."""
+    return self.new_metadata.IterPaths()
+
+  def IterAllSubpaths(self, path):
+    """Generator for subpaths."""
+    return self.new_metadata.IterSubpaths(path)
+
+  def IterAddedPaths(self):
+    """Generator for paths that were added."""
+    for path in self.new_metadata.IterPaths():
+      if self._GetOldTag(path) is None:
+        yield path
+
+  def IterAddedSubpaths(self, path):
+    """Generator for paths that were added within the given zip file."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      if self._GetOldTag(path, subpath) is None:
+        yield subpath
+
+  def IterRemovedPaths(self):
+    """Generator for paths that were removed."""
+    if self.old_metadata:
+      for path in self.old_metadata.IterPaths():
+        if self.new_metadata.GetTag(path) is None:
+          yield path
+
+  def IterRemovedSubpaths(self, path):
+    """Generator for paths that were removed within the given zip file."""
+    if self.old_metadata:
+      for subpath in self.old_metadata.IterSubpaths(path):
+        if self.new_metadata.GetTag(path, subpath) is None:
+          yield subpath
+
+  def IterModifiedPaths(self):
+    """Generator for paths whose contents have changed."""
+    for path in self.new_metadata.IterPaths():
+      old_tag = self._GetOldTag(path)
+      new_tag = self.new_metadata.GetTag(path)
+      if old_tag is not None and old_tag != new_tag:
+        yield path
+
+  def IterModifiedSubpaths(self, path):
+    """Generator for paths within a zip file whose contents have changed."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      old_tag = self._GetOldTag(path, subpath)
+      new_tag = self.new_metadata.GetTag(path, subpath)
+      if old_tag is not None and old_tag != new_tag:
+        yield subpath
+
+  def IterChangedPaths(self):
+    """Generator for all changed paths (added/removed/modified)."""
+    return itertools.chain(self.IterRemovedPaths(),
+                           self.IterModifiedPaths(),
+                           self.IterAddedPaths())
+
+  def IterChangedSubpaths(self, path):
+    """Generator for paths within a zip that were added/removed/modified."""
+    return itertools.chain(self.IterRemovedSubpaths(path),
+                           self.IterModifiedSubpaths(path),
+                           self.IterAddedSubpaths(path))
+
+  def DescribeDifference(self):
+    """Returns a human-readable description of what changed."""
+    if self.force:
+      return 'force=True'
+    elif self.missing_outputs:
+      return 'Outputs do not exist:\n  ' + '\n  '.join(self.missing_outputs)
+    elif self.too_new:
+      return 'Outputs newer than stamp file:\n  ' + '\n  '.join(self.too_new)
+    elif self.old_metadata is None:
+      return 'Previous stamp file not found.'
+
+    if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
+      ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
+                            self.new_metadata.GetStrings())
+      changed = [s for s in ndiff if not s.startswith(' ')]
+      return 'Input strings changed:\n  ' + '\n  '.join(changed)
+
+    if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
+      return "There's no difference."
+
+    lines = []
+    lines.extend('Added: ' + p for p in self.IterAddedPaths())
+    lines.extend('Removed: ' + p for p in self.IterRemovedPaths())
+    for path in self.IterModifiedPaths():
+      lines.append('Modified: ' + path)
+      lines.extend('  -> Subpath added: ' + p
+                   for p in self.IterAddedSubpaths(path))
+      lines.extend('  -> Subpath removed: ' + p
+                   for p in self.IterRemovedSubpaths(path))
+      lines.extend('  -> Subpath modified: ' + p
+                   for p in self.IterModifiedSubpaths(path))
+    if lines:
+      return 'Input files changed:\n  ' + '\n  '.join(lines)
+    return 'I have no idea what changed (there is a bug).'
+
+
+class _Metadata(object):
+  """Data model for tracking change metadata.
+
+  Args:
+    track_entries: Enables per-file change tracking. Slower, but required for
+        Changes functionality.
+  """
+  # Schema:
+  # {
+  #   "files-md5": "VALUE",
+  #   "strings-md5": "VALUE",
+  #   "input-files": [
+  #     {
+  #       "path": "path.jar",
+  #       "tag": "{MD5 of entries}",
+  #       "entries": [
+  #         { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ...
+  #       ]
+  #     }, {
+  #       "path": "path.txt",
+  #       "tag": "{MD5}",
+  #     }
+  #   ],
+  #   "input-strings": ["a", "b", ...],
+  # }
+  def __init__(self, track_entries=False):
+    self._track_entries = track_entries
+    self._files_md5 = None
+    self._strings_md5 = None
+    self._files = []
+    self._strings = []
+    # Map of (path, subpath) -> entry. Created upon first call to _GetEntry().
+    self._file_map = None
+
+  @classmethod
+  def FromFile(cls, fileobj):
+    """Returns a _Metadata initialized from a file object."""
+    ret = cls()
+    obj = json.load(fileobj)
+    ret._files_md5 = obj['files-md5']
+    ret._strings_md5 = obj['strings-md5']
+    ret._files = obj.get('input-files', [])
+    ret._strings = obj.get('input-strings', [])
+    return ret
+
+  def ToFile(self, fileobj):
+    """Serializes metadata to the given file object."""
+    obj = {
+        'files-md5': self.FilesMd5(),
+        'strings-md5': self.StringsMd5(),
+    }
+    if self._track_entries:
+      obj['input-files'] = sorted(self._files, key=lambda e: e['path'])
+      obj['input-strings'] = self._strings
+
+    json.dump(obj, fileobj, indent=2)
+
+  def _AssertNotQueried(self):
+    assert self._files_md5 is None
+    assert self._strings_md5 is None
+    assert self._file_map is None
+
+  def AddStrings(self, values):
+    self._AssertNotQueried()
+    self._strings.extend(str(v) for v in values)
+
+  def AddFile(self, path, tag):
+    """Adds metadata for a non-zip file.
+
+    Args:
+      path: Path to the file.
+      tag: A short string representative of the file contents.
+    """
+    self._AssertNotQueried()
+    self._files.append({
+        'path': path,
+        'tag': tag,
+    })
+
+  def AddZipFile(self, path, entries):
+    """Adds metadata for a zip file.
+
+    Args:
+      path: Path to the file.
+      entries: List of (subpath, tag) tuples for entries within the zip.
+    """
+    self._AssertNotQueried()
+    tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries),
+                                            (e[1] for e in entries)))
+    self._files.append({
+        'path': path,
+        'tag': tag,
+        'entries': [{"path": e[0], "tag": e[1]} for e in entries],
+    })
+
+  def GetStrings(self):
+    """Returns the list of input strings."""
+    return self._strings
+
+  def FilesMd5(self):
+    """Lazily computes and returns the aggregate md5 of input files."""
+    if self._files_md5 is None:
+      # Omit paths from md5 since temporary files have random names.
+      self._files_md5 = _ComputeInlineMd5(
+          self.GetTag(p) for p in sorted(self.IterPaths()))
+    return self._files_md5
+
+  def StringsMd5(self):
+    """Lazily computes and returns the aggregate md5 of input strings."""
+    if self._strings_md5 is None:
+      self._strings_md5 = _ComputeInlineMd5(self._strings)
+    return self._strings_md5
+
+  def _GetEntry(self, path, subpath=None):
+    """Returns the JSON entry for the given path / subpath."""
+    if self._file_map is None:
+      self._file_map = {}
+      for entry in self._files:
+        self._file_map[(entry['path'], None)] = entry
+        for subentry in entry.get('entries', ()):
+          self._file_map[(entry['path'], subentry['path'])] = subentry
+    return self._file_map.get((path, subpath))
+
+  def GetTag(self, path, subpath=None):
+    """Returns the tag for the given path / subpath."""
+    ret = self._GetEntry(path, subpath)
+    return ret and ret['tag']
+
+  def IterPaths(self):
+    """Returns a generator for all top-level paths."""
+    return (e['path'] for e in self._files)
+
+  def IterSubpaths(self, path):
+    """Returns a generator for all subpaths in the given zip.
+
+    If the given path is not a zip file or doesn't exist, returns an empty
+    iterable.
+    """
+    outer_entry = self._GetEntry(path)
+    if not outer_entry:
+      return ()
+    subentries = outer_entry.get('entries', [])
+    return (entry['path'] for entry in subentries)
+
+
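+# A minimal usage sketch (illustrative, not part of the original file; the
+# paths and flag are hypothetical):
+#
+#   metadata = _Metadata(track_entries=True)
+#   metadata.AddStrings(['--enable-foo'])
+#   metadata.AddFile('in.txt', _ComputeTagForPath('in.txt'))
+#   metadata.AddZipFile('in.jar', _ExtractZipEntries('in.jar'))
+#   with open('out.stamp', 'w') as f:
+#     metadata.ToFile(f)  # FilesMd5()/StringsMd5() are computed lazily here.
+
+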
+def _ComputeTagForPath(path):
+  stat = os.stat(path)
+  if stat.st_size > 1 * 1024 * 1024:
+    # Fall back to mtime for large files so that md5_check does not take too
+    # long to run.
+    return stat.st_mtime
+  md5 = hashlib.md5()
+  with open(path, 'rb') as f:
+    md5.update(f.read())
+  return md5.hexdigest()
+
+
+def _ComputeInlineMd5(iterable):
+  """Computes the md5 of the concatenated parameters."""
+  md5 = hashlib.md5()
+  for item in iterable:
+    md5.update(str(item).encode('ascii'))
+  return md5.hexdigest()
+
+
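+# For example (illustrative): _ComputeInlineMd5(['a', 'b']) hashes the
+# concatenation 'ab' and therefore equals hashlib.md5(b'ab').hexdigest().
+
+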
+def _ExtractZipEntries(path):
+  """Returns a list of (path, CRC32) of all files within |path|."""
+  entries = []
+  with zipfile.ZipFile(path) as zip_file:
+    for zip_info in zip_file.infolist():
+      # Skip directories and empty files.
+      if zip_info.CRC:
+        entries.append(
+            (zip_info.filename, zip_info.CRC + zip_info.compress_type))
+  return entries
diff --git a/src/build/android/gyp/util/md5_check_test.py b/src/build/android/gyp/util/md5_check_test.py
new file mode 100755
index 0000000..e11bbd5
--- /dev/null
+++ b/src/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import os
+import sys
+import tempfile
+import unittest
+import zipfile
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import md5_check
+
+
+def _WriteZipFile(path, entries):
+  with zipfile.ZipFile(path, 'w') as zip_file:
+    for subpath, data in entries:
+      zip_file.writestr(subpath, data)
+
+
+class TestMd5Check(unittest.TestCase):
+  def setUp(self):
+    self.called = False
+    self.changes = None
+
+  def testCallAndRecordIfStale(self):
+    input_strings = ['string1', 'string2']
+    input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
+    input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
+    file1_contents = b'input file 1'
+    input_file1.write(file1_contents)
+    input_file1.flush()
+    # Test out empty zip file to start.
+    _WriteZipFile(input_file2.name, [])
+    input_files = [input_file1.name, input_file2.name]
+    zip_paths = [input_file2.name]
+
+    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+    def CheckCallAndRecord(should_call,
+                           message,
+                           force=False,
+                           outputs_specified=False,
+                           outputs_missing=False,
+                           expected_changes=None,
+                           added_or_modified_only=None,
+                           track_subentries=False,
+                           output_newer_than_record=False):
+      output_paths = None
+      if outputs_specified:
+        output_file1 = tempfile.NamedTemporaryFile()
+        if outputs_missing:
+          output_file1.close()  # Gets deleted on close().
+        output_paths = [output_file1.name]
+      if output_newer_than_record:
+        output_mtime = os.path.getmtime(output_file1.name)
+        os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
+      else:
+        # Touch the record file so it doesn't look like it's older than
+        # the output we've just created.
+        os.utime(record_path.name, None)
+
+      self.called = False
+      self.changes = None
+      if expected_changes or added_or_modified_only is not None:
+        def MarkCalled(changes):
+          self.called = True
+          self.changes = changes
+      else:
+        def MarkCalled():
+          self.called = True
+
+      md5_check.CallAndRecordIfStale(
+          MarkCalled,
+          record_path=record_path.name,
+          input_paths=input_files,
+          input_strings=input_strings,
+          output_paths=output_paths,
+          force=force,
+          pass_changes=(expected_changes or added_or_modified_only) is not None,
+          track_subpaths_allowlist=zip_paths if track_subentries else None)
+      self.assertEqual(should_call, self.called, message)
+      if expected_changes:
+        description = self.changes.DescribeDifference()
+        self.assertTrue(fnmatch.fnmatch(description, expected_changes),
+                        'Expected %s to match %s' % (
+                        repr(description), repr(expected_changes)))
+      if should_call and added_or_modified_only is not None:
+        self.assertEqual(added_or_modified_only,
+                         self.changes.AddedOrModifiedOnly())
+
+    CheckCallAndRecord(True, 'should call when record doesn\'t exist',
+                       expected_changes='Previous stamp file not found.',
+                       added_or_modified_only=False)
+    CheckCallAndRecord(False, 'should not call when nothing changed')
+    input_files = input_files[::-1]
+    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+    CheckCallAndRecord(False, 'should not call when nothing changed #2',
+                       outputs_specified=True, outputs_missing=False)
+    CheckCallAndRecord(True, 'should call when output missing',
+                       outputs_specified=True, outputs_missing=True,
+                       expected_changes='Outputs do not exist:*',
+                       added_or_modified_only=False)
+    CheckCallAndRecord(True,
+                       'should call when output is newer than record',
+                       expected_changes='Outputs newer than stamp file:*',
+                       outputs_specified=True,
+                       outputs_missing=False,
+                       added_or_modified_only=False,
+                       output_newer_than_record=True)
+    CheckCallAndRecord(True, force=True, message='should call when forced',
+                       expected_changes='force=True',
+                       added_or_modified_only=False)
+
+    input_file1.write(b'some more input')
+    input_file1.flush()
+    CheckCallAndRecord(True, 'changed input file should trigger call',
+                       expected_changes='*Modified: %s' % input_file1.name,
+                       added_or_modified_only=True)
+
+    input_files = input_files[:1]
+    CheckCallAndRecord(True, 'removing file should trigger call',
+                       expected_changes='*Removed: %s' % input_file1.name,
+                       added_or_modified_only=False)
+
+    input_files.append(input_file1.name)
+    CheckCallAndRecord(True, 'added input file should trigger call',
+                       expected_changes='*Added: %s' % input_file1.name,
+                       added_or_modified_only=True)
+
+    input_strings[0] = input_strings[0] + ' a bit longer'
+    CheckCallAndRecord(True, 'changed input string should trigger call',
+                       expected_changes='*Input strings changed*',
+                       added_or_modified_only=False)
+
+    input_strings = input_strings[::-1]
+    CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
+                       expected_changes='*Input strings changed*')
+
+    input_strings = input_strings[:1]
+    CheckCallAndRecord(True, 'removing a string should trigger call')
+
+    input_strings.append('a brand new string')
+    CheckCallAndRecord(
+        True,
+        'added input string should trigger call',
+        added_or_modified_only=False)
+
+    _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
+    CheckCallAndRecord(
+        True,
+        'added subpath should trigger call',
+        expected_changes='*Modified: %s*Subpath added: %s' % (input_file2.name,
+                                                              'path/1.txt'),
+        added_or_modified_only=True,
+        track_subentries=True)
+    _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
+    CheckCallAndRecord(
+        True,
+        'changed subpath should trigger call',
+        expected_changes='*Modified: %s*Subpath modified: %s' %
+        (input_file2.name, 'path/1.txt'),
+        added_or_modified_only=True,
+        track_subentries=True)
+
+    _WriteZipFile(input_file2.name, [])
+    CheckCallAndRecord(True, 'removed subpath should trigger call',
+                       expected_changes='*Modified: %s*Subpath removed: %s' % (
+                                        input_file2.name, 'path/1.txt'),
+                       added_or_modified_only=False)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/parallel.py b/src/build/android/gyp/util/parallel.py
new file mode 100644
index 0000000..c26875a
--- /dev/null
+++ b/src/build/android/gyp/util/parallel.py
@@ -0,0 +1,214 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers related to multiprocessing.
+
+Based on: //tools/binary_size/libsupersize/parallel.py
+"""
+
+import atexit
+import builtins
+import logging
+import multiprocessing
+import os
+import sys
+import threading
+import traceback
+
+DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1'
+if DISABLE_ASYNC:
+  logging.warning('Running in synchronous mode.')
+
+_all_pools = None
+_is_child_process = False
+_silence_exceptions = False
+
+# Used to pass parameters to forked processes without pickling.
+_fork_params = None
+_fork_kwargs = None
+
+
+class _ImmediateResult(object):
+  def __init__(self, value):
+    self._value = value
+
+  def get(self):
+    return self._value
+
+  def wait(self):
+    pass
+
+  def ready(self):
+    return True
+
+  def successful(self):
+    return True
+
+
+class _ExceptionWrapper(object):
+  """Used to marshal exception messages back to main process."""
+
+  def __init__(self, msg, exception_type=None):
+    self.msg = msg
+    self.exception_type = exception_type
+
+  def MaybeThrow(self):
+    if self.exception_type:
+      # Look up the exception class on the builtins module rather than
+      # __builtins__, which is a dict (not a module) in imported modules.
+      raise getattr(builtins,
+                    self.exception_type)('Originally caused by: ' + self.msg)
+
+
+class _FuncWrapper(object):
+  """Runs on the fork()'ed side to catch exceptions and spread *args."""
+
+  def __init__(self, func):
+    global _is_child_process
+    _is_child_process = True
+    self._func = func
+
+  def __call__(self, index, _=None):
+    try:
+      return self._func(*_fork_params[index], **_fork_kwargs)
+    except Exception as e:
+      # Only keep the exception type for builtin exception types or else risk
+      # further marshalling exceptions.
+      exception_type = None
+      if hasattr(builtins, type(e).__name__):
+        exception_type = type(e).__name__
+      # multiprocessing is supposed to catch and return exceptions automatically
+      # but it doesn't seem to work properly :(.
+      return _ExceptionWrapper(traceback.format_exc(), exception_type)
+    except:  # pylint: disable=bare-except
+      return _ExceptionWrapper(traceback.format_exc())
+
+
+class _WrappedResult(object):
+  """Allows for host-side logic to be run after child process has terminated.
+
+  * Unregisters the associated pool from _all_pools.
+  * Raises exception caught by _FuncWrapper.
+  """
+
+  def __init__(self, result, pool=None):
+    self._result = result
+    self._pool = pool
+
+  def get(self):
+    self.wait()
+    value = self._result.get()
+    _CheckForException(value)
+    return value
+
+  def wait(self):
+    self._result.wait()
+    if self._pool:
+      _all_pools.remove(self._pool)
+      self._pool = None
+
+  def ready(self):
+    return self._result.ready()
+
+  def successful(self):
+    return self._result.successful()
+
+
+def _TerminatePools():
+  """Calls .terminate() on all active process pools.
+
+  Not supposed to be necessary according to the docs, but seems to be required
+  when child process throws an exception or Ctrl-C is hit.
+  """
+  global _silence_exceptions
+  _silence_exceptions = True
+  # Child processes cannot have pools, but atexit runs this function because
+  # it was registered before fork()ing.
+  if _is_child_process:
+    return
+
+  def close_pool(pool):
+    try:
+      pool.terminate()
+    except:  # pylint: disable=bare-except
+      pass
+
+  for i, pool in enumerate(_all_pools):
+    # Without calling terminate() on a separate thread, the call can block
+    # forever.
+    thread = threading.Thread(name='Pool-Terminate-{}'.format(i),
+                              target=close_pool,
+                              args=(pool, ))
+    thread.daemon = True
+    thread.start()
+
+
+def _CheckForException(value):
+  if isinstance(value, _ExceptionWrapper):
+    global _silence_exceptions
+    if not _silence_exceptions:
+      value.MaybeThrow()
+      _silence_exceptions = True
+      logging.error('Subprocess raised an exception:\n%s', value.msg)
+    sys.exit(1)
+
+
+def _MakeProcessPool(job_params, **job_kwargs):
+  global _all_pools
+  global _fork_params
+  global _fork_kwargs
+  assert _fork_params is None
+  assert _fork_kwargs is None
+  pool_size = min(len(job_params), multiprocessing.cpu_count())
+  _fork_params = job_params
+  _fork_kwargs = job_kwargs
+  ret = multiprocessing.Pool(pool_size)
+  _fork_params = None
+  _fork_kwargs = None
+  if _all_pools is None:
+    _all_pools = []
+    atexit.register(_TerminatePools)
+  _all_pools.append(ret)
+  return ret
+
+
+def ForkAndCall(func, args):
+  """Runs |func| in a fork'ed process.
+
+  Returns:
+    A Result object (call .get() to get the return value)
+  """
+  if DISABLE_ASYNC:
+    pool = None
+    result = _ImmediateResult(func(*args))
+  else:
+    pool = _MakeProcessPool([args])  # Omit |kwargs|.
+    result = pool.apply_async(_FuncWrapper(func), (0, ))
+    pool.close()
+  return _WrappedResult(result, pool=pool)
+
+
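+# Usage sketch (illustrative; _DoubleHypothetical is not part of this file and
+# must be a module-level function so that it can be pickled):
+#
+#   def _DoubleHypothetical(x):
+#     return 2 * x
+#
+#   result = ForkAndCall(_DoubleHypothetical, (21,))
+#   ...  # Do other work while the child process runs.
+#   assert result.get() == 42
+
+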
+def BulkForkAndCall(func, arg_tuples, **kwargs):
+  """Calls |func| in a fork'ed process for each set of args within |arg_tuples|.
+
+  Args:
+    kwargs: Common keyword arguments to be passed to |func|.
+
+  Yields the return values in order.
+  """
+  arg_tuples = list(arg_tuples)
+  if not arg_tuples:
+    return
+
+  if DISABLE_ASYNC:
+    for args in arg_tuples:
+      yield func(*args, **kwargs)
+    return
+
+  pool = _MakeProcessPool(arg_tuples, **kwargs)
+  wrapped_func = _FuncWrapper(func)
+  try:
+    for result in pool.imap(wrapped_func, range(len(arg_tuples))):
+      _CheckForException(result)
+      yield result
+  finally:
+    pool.close()
+    pool.join()
+    _all_pools.remove(pool)
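+
+
+# Usage sketch (illustrative; _ScaleHypothetical is not part of this file):
+#
+#   def _ScaleHypothetical(value, factor=1):
+#     return value * factor
+#
+#   for scaled in BulkForkAndCall(_ScaleHypothetical, [(1,), (2,), (3,)],
+#                                 factor=10):
+#     print(scaled)  # Prints 10, 20, 30, in order.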
diff --git a/src/build/android/gyp/util/protoresources.py b/src/build/android/gyp/util/protoresources.py
new file mode 100644
index 0000000..272574f
--- /dev/null
+++ b/src/build/android/gyp/util/protoresources.py
@@ -0,0 +1,308 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Functions that modify resources in protobuf format.
+
+Format reference:
+https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/Resources.proto
+"""
+
+import logging
+import os
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+from util import resource_utils
+
+sys.path[1:1] = [
+    # `Resources_pb2` module imports `descriptor`, which imports `six`.
+    os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'six', 'src'),
+    # Make sure the pb2 files are able to import google.protobuf
+    os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'protobuf',
+                 'python'),
+]
+
+from proto import Resources_pb2
+
+# First bytes in an .flat.arsc file.
+# uint32: Magic ("ARSC"), version (1), num_entries (1), type (0)
+_FLAT_ARSC_HEADER = b'AAPT\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'
+
+# The package ID hardcoded for shared libraries. See
+# _HardcodeSharedLibraryDynamicAttributes() for more details. If this value
+# changes make sure to change REQUIRED_PACKAGE_IDENTIFIER in WebLayerImpl.java.
+SHARED_LIBRARY_HARDCODED_ID = 36
+
+
+def _ProcessZip(zip_path, process_func):
+  """Filters a .zip file via: new_bytes = process_func(filename, data)."""
+  has_changes = False
+  zip_entries = []
+  with zipfile.ZipFile(zip_path) as src_zip:
+    for info in src_zip.infolist():
+      data = src_zip.read(info)
+      new_data = process_func(info.filename, data)
+      if new_data is not data:
+        has_changes = True
+        data = new_data
+      zip_entries.append((info, data))
+
+  # Overwrite the original zip file.
+  if has_changes:
+    with zipfile.ZipFile(zip_path, 'w') as f:
+      for info, data in zip_entries:
+        f.writestr(info, data)
+
+
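+# Example |process_func| (illustrative, not part of the original file). Note
+# that _ProcessZip uses an identity check (|new_data is not data|), so
+# returning |data| unchanged marks an entry as unmodified:
+#
+#   def _UpperCaseTxt(filename, data):
+#     if filename.endswith('.txt'):
+#       return data.upper()
+#     return data
+#
+#   _ProcessZip('some.zip', _UpperCaseTxt)
+
+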
+def _ProcessProtoItem(item):
+  if not item.HasField('ref'):
+    return
+
+  # If this is a dynamic attribute (type ATTRIBUTE, package ID 0), hardcode
+  # the package to SHARED_LIBRARY_HARDCODED_ID.
+  if item.ref.type == Resources_pb2.Reference.ATTRIBUTE and not (item.ref.id
+                                                                 & 0xff000000):
+    item.ref.id |= (0x01000000 * SHARED_LIBRARY_HARDCODED_ID)
+    item.ref.ClearField('is_dynamic')
+
+
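+# For example (illustrative): with SHARED_LIBRARY_HARDCODED_ID == 36 (0x24),
+# a dynamic attribute reference id of 0x00010005 becomes
+# 0x00010005 | (0x01000000 * 36) == 0x24010005.
+
+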
+def _ProcessProtoValue(value):
+  if value.HasField('item'):
+    _ProcessProtoItem(value.item)
+    return
+
+  compound_value = value.compound_value
+  if compound_value.HasField('style'):
+    for entry in compound_value.style.entry:
+      _ProcessProtoItem(entry.item)
+  elif compound_value.HasField('array'):
+    for element in compound_value.array.element:
+      _ProcessProtoItem(element.item)
+  elif compound_value.HasField('plural'):
+    for entry in compound_value.plural.entry:
+      _ProcessProtoItem(entry.item)
+
+
+def _ProcessProtoXmlNode(xml_node):
+  if not xml_node.HasField('element'):
+    return
+
+  for attribute in xml_node.element.attribute:
+    _ProcessProtoItem(attribute.compiled_item)
+
+  for child in xml_node.element.child:
+    _ProcessProtoXmlNode(child)
+
+
+def _SplitLocaleResourceType(_type, allowed_resource_names):
+  """Splits locale specific resources out of |_type| and returns them.
+
+  Any locale specific resources will be removed from |_type|, and a new
+  Resources_pb2.Type value will be returned which contains those resources.
+
+  Args:
+    _type: A Resources_pb2.Type value
+    allowed_resource_names: Names of locale resources that should be kept in the
+        main type.
+  """
+  locale_entries = []
+  for entry in _type.entry:
+    if entry.name in allowed_resource_names:
+      continue
+
+    # First, collect all resource values with a locale set.
+    config_values_with_locale = []
+    for config_value in entry.config_value:
+      if config_value.config.locale:
+        config_values_with_locale.append(config_value)
+
+    if config_values_with_locale:
+      # Remove the locale resources from the original entry
+      for value in config_values_with_locale:
+        entry.config_value.remove(value)
+
+      # Add locale resources to a new Entry, and save for later.
+      locale_entry = Resources_pb2.Entry()
+      locale_entry.CopyFrom(entry)
+      del locale_entry.config_value[:]
+      locale_entry.config_value.extend(config_values_with_locale)
+      locale_entries.append(locale_entry)
+
+  if not locale_entries:
+    return None
+
+  # Copy the original type and replace the entries with |locale_entries|.
+  locale_type = Resources_pb2.Type()
+  locale_type.CopyFrom(_type)
+  del locale_type.entry[:]
+  locale_type.entry.extend(locale_entries)
+  return locale_type
+
+
+def _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist):
+  translations_package = None
+  if is_bundle_module:
+    # A separate top level package will be added to the resources, which
+    # contains only locale specific resources. The package ID of the locale
+    # resources is hardcoded to SHARED_LIBRARY_HARDCODED_ID. This causes
+    # resources in locale splits to all get assigned
+    # SHARED_LIBRARY_HARDCODED_ID as their package ID, which prevents a bug
+    # in shared library bundles where each split APK gets a separate dynamic
+    # ID, and cannot be accessed by the main APK.
+    translations_package = Resources_pb2.Package()
+    translations_package.package_id.id = SHARED_LIBRARY_HARDCODED_ID
+    translations_package.package_name = (table.package[0].package_name +
+                                         '_translations')
+
+    # These resources are allowed in the base resources, since they are needed
+    # by WebView.
+    allowed_resource_names = set()
+    if shared_resources_allowlist:
+      allowed_resource_names = set(
+          resource_utils.GetRTxtStringResourceNames(shared_resources_allowlist))
+
+  for package in table.package:
+    for _type in package.type:
+      for entry in _type.entry:
+        for config_value in entry.config_value:
+          _ProcessProtoValue(config_value.value)
+
+      if translations_package is not None:
+        locale_type = _SplitLocaleResourceType(_type, allowed_resource_names)
+        if locale_type:
+          translations_package.type.add().CopyFrom(locale_type)
+
+  if translations_package is not None:
+    table.package.add().CopyFrom(translations_package)
+
+
+def HardcodeSharedLibraryDynamicAttributes(zip_path,
+                                           is_bundle_module,
+                                           shared_resources_allowlist=None):
+  """Hardcodes the package IDs of dynamic attributes and locale resources.
+
+  Hardcoding dynamic attribute package IDs is a workaround for b/147674078,
+  which affects Android versions pre-N. Hardcoding locale resource package IDs
+  is a workaround for b/155437035, which affects resources built with
+  --shared-lib on all Android versions
+
+  Args:
+    zip_path: Path to proto APK file.
+    is_bundle_module: True for bundle modules.
+    shared_resources_allowlist: Set of resource names to not extract out of the
+        main package.
+  """
+
+  def process_func(filename, data):
+    if filename == 'resources.pb':
+      table = Resources_pb2.ResourceTable()
+      table.ParseFromString(data)
+      _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist)
+      data = table.SerializeToString()
+    elif filename.endswith('.xml') and not filename.startswith('res/raw'):
+      xml_node = Resources_pb2.XmlNode()
+      xml_node.ParseFromString(data)
+      _ProcessProtoXmlNode(xml_node)
+      data = xml_node.SerializeToString()
+    return data
+
+  _ProcessZip(zip_path, process_func)
+
+
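+# Typical invocation (illustrative, hypothetical path):
+#
+#   HardcodeSharedLibraryDynamicAttributes(
+#       'out/Release/gen/foo.proto.ap_', is_bundle_module=False)
+
+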
+class _ResourceStripper(object):
+  def __init__(self, partial_path, keep_predicate):
+    self.partial_path = partial_path
+    self.keep_predicate = keep_predicate
+    self._has_changes = False
+
+  @staticmethod
+  def _IterStyles(entry):
+    for config_value in entry.config_value:
+      value = config_value.value
+      if value.HasField('compound_value'):
+        compound_value = value.compound_value
+        if compound_value.HasField('style'):
+          yield compound_value.style
+
+  def _StripStyles(self, entry, type_and_name):
+    # Strip style entries that refer to attributes that have been stripped.
+    for style in self._IterStyles(entry):
+      entries = style.entry
+      new_entries = []
+      for entry in entries:
+        full_name = '{}/{}'.format(type_and_name, entry.key.name)
+        if not self.keep_predicate(full_name):
+          logging.debug('Stripped %s/%s', self.partial_path, full_name)
+        else:
+          new_entries.append(entry)
+
+      if len(new_entries) != len(entries):
+        self._has_changes = True
+        del entries[:]
+        entries.extend(new_entries)
+
+  def _StripEntries(self, entries, type_name):
+    new_entries = []
+    for entry in entries:
+      type_and_name = '{}/{}'.format(type_name, entry.name)
+      if not self.keep_predicate(type_and_name):
+        logging.debug('Stripped %s/%s', self.partial_path, type_and_name)
+      else:
+        new_entries.append(entry)
+        self._StripStyles(entry, type_and_name)
+
+    if len(new_entries) != len(entries):
+      self._has_changes = True
+      del entries[:]
+      entries.extend(new_entries)
+
+  def StripTable(self, table):
+    self._has_changes = False
+    for package in table.package:
+      for _type in package.type:
+        self._StripEntries(_type.entry, _type.name)
+    return self._has_changes
+
+
+def _TableFromFlatBytes(data):
+  # https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/format/Container.cpp
+  size_idx = len(_FLAT_ARSC_HEADER)
+  proto_idx = size_idx + 8
+  if data[:size_idx] != _FLAT_ARSC_HEADER:
+    raise Exception('Unexpected header in .arsc.flat data')
+  # Size is stored as uint64.
+  size = struct.unpack('<Q', data[size_idx:proto_idx])[0]
+  table = Resources_pb2.ResourceTable()
+  proto_bytes = data[proto_idx:proto_idx + size]
+  table.ParseFromString(proto_bytes)
+  return table
+
+
+def _FlatBytesFromTable(table):
+  proto_bytes = table.SerializeToString()
+  size = struct.pack('<Q', len(proto_bytes))
+  overage = len(proto_bytes) % 4
+  padding = b'\0' * (4 - overage) if overage else b''
+  return b''.join((_FLAT_ARSC_HEADER, size, proto_bytes, padding))
+
+
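+# Container framing used by the two functions above (sketch): a 16-byte header
+# (_FLAT_ARSC_HEADER), a little-endian uint64 payload size, the serialized
+# ResourceTable proto, then zero padding to a 4-byte boundary. Round-trip
+# property (illustrative):
+#   _TableFromFlatBytes(_FlatBytesFromTable(table)) == table
+
+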
+def StripUnwantedResources(partial_path, keep_predicate):
+  """Removes resources from .arsc.flat files inside of a .zip.
+
+  Args:
+    partial_path: Path to a .zip containing .arsc.flat entries
+    keep_predicate: Given "$partial_path/$res_type/$res_name", returns
+      whether to keep the resource.
+  """
+  stripper = _ResourceStripper(partial_path, keep_predicate)
+
+  def process_file(filename, data):
+    if filename.endswith('.arsc.flat'):
+      table = _TableFromFlatBytes(data)
+      if stripper.StripTable(table):
+        data = _FlatBytesFromTable(table)
+    return data
+
+  _ProcessZip(partial_path, process_file)
diff --git a/src/build/android/gyp/util/resource_utils.py b/src/build/android/gyp/util/resource_utils.py
new file mode 100644
index 0000000..263b7c2
--- /dev/null
+++ b/src/build/android/gyp/util/resource_utils.py
@@ -0,0 +1,1066 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import itertools
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+import util.build_utils as build_utils
+
+_SOURCE_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template  # pylint: disable=F0401
+
+
+# A variation of these maps also exists in:
+# //base/android/java/src/org/chromium/base/LocaleUtils.java
+# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+    'es-419': 'es-rUS',
+    'sr-Latn': 'b+sr+Latn',
+    'fil': 'tl',
+    'he': 'iw',
+    'id': 'in',
+    'yi': 'ji',
+}
+_ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
+    'tl': 'fil',
+    'iw': 'he',
+    'in': 'id',
+    'ji': 'yi',
+    'no': 'nb',  # 'no' is not a real language. http://crbug.com/920960
+}
+
+_ALL_RESOURCE_TYPES = {
+    'anim', 'animator', 'array', 'attr', 'bool', 'color', 'dimen', 'drawable',
+    'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'menu',
+    'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable', 'transition',
+    'xml'
+}
+
+AAPT_IGNORE_PATTERN = ':'.join([
+    '*OWNERS',  # Allow OWNERS files within res/
+    'DIR_METADATA',  # Allow DIR_METADATA files within res/
+    '*.py',  # PRESUBMIT.py sometimes exist.
+    '*.pyc',
+    '*~',  # Some editors create these as temp files.
+    '.*',  # Never makes sense to include dot(files/dirs).
+    '*.d.stamp',  # Ignore stamp files
+    '*.backup',  # Some tools create temporary backup files.
+])
+
+MULTIPLE_RES_MAGIC_STRING = b'magic'
+
+
+def ToAndroidLocaleName(chromium_locale):
+  """Convert a Chromium locale name into a corresponding Android one."""
+  # Should be in sync with build/config/locales.gni.
+  # First handle the special cases, these are needed to deal with Android
+  # releases *before* 5.0/Lollipop.
+  android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
+  if android_locale:
+    return android_locale
+
+  # Format of Chromium locale name is '<lang>' or '<lang>-<region>'
+  # where <lang> is a 2 or 3 letter language code (ISO 639-1 or 639-2)
+  # and region is a capitalized locale region name.
+  lang, _, region = chromium_locale.partition('-')
+  if not region:
+    return lang
+
+  # Translate newer language tags into obsolete ones. Only necessary if
+  # region is not None (e.g. 'he-IL' -> 'iw-rIL').
+  lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)
+
+  # Using '<lang>-r<region>' is now acceptable as a locale name for all
+  # versions of Android.
+  return '%s-r%s' % (lang, region)
+
+
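+# For example (illustrative):
+#   ToAndroidLocaleName('fil') == 'tl'        # Special-case map.
+#   ToAndroidLocaleName('he-IL') == 'iw-rIL'  # Obsolete language tag + region.
+#   ToAndroidLocaleName('fr-CA') == 'fr-rCA'  # Generic <lang>-r<region> form.
+
+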
+# ISO 639 language code + optional ("-r" + capitalized region code).
+# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
+# are supported.
+_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')
+
+# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
+# be prefixed with 'b+', and may include optional tags.
+#  e.g. 'b+en+US', 'b+ja+Latn', 'b+ja+Latn+JP'
+_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
+
+
+def ToChromiumLocaleName(android_locale):
+  """Convert an Android locale name into a Chromium one."""
+  lang = None
+  region = None
+  script = None
+  m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
+  if m:
+    lang = m.group(1)
+    if m.group(2):
+      region = m.group(3)
+  elif _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale):
+    # Split an Android BCP-47 locale (e.g. b+sr+Latn+RS)
+    tags = android_locale.split('+')
+
+    # The Lang tag is always the first tag.
+    lang = tags[1]
+
+    # The optional region tag is 2ALPHA or 3DIGIT tag in pos 1 or 2.
+    # The optional script tag is 4ALPHA and always in pos 1.
+    optional_tags = iter(tags[2:])
+
+    next_tag = next(optional_tags, None)
+    if next_tag and len(next_tag) == 4:
+      script = next_tag
+      next_tag = next(optional_tags, None)
+    if next_tag and len(next_tag) < 4:
+      region = next_tag
+
+  if not lang:
+    return None
+
+  # Special case for es-rUS -> es-419
+  if lang == 'es' and region == 'US':
+    return 'es-419'
+
+  lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
+
+  if script:
+    lang = '%s-%s' % (lang, script)
+
+  if not region:
+    return lang
+
+  return '%s-%s' % (lang, region)
+
+
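+# For example (illustrative):
+#   ToChromiumLocaleName('iw-rIL') == 'he-IL'
+#   ToChromiumLocaleName('es-rUS') == 'es-419'
+#   ToChromiumLocaleName('b+sr+Latn+RS') == 'sr-Latn-RS'
+
+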
+def IsAndroidLocaleQualifier(string):
+  """Returns true if |string| is a valid Android resource locale qualifier."""
+  return (_RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
+          or _RE_ANDROID_LOCALE_QUALIFIER_2.match(string))
+
+
+def FindLocaleInStringResourceFilePath(file_path):
+  """Return Android locale name of a string resource file path.
+
+  Args:
+    file_path: A file path.
+  Returns:
+    If |file_path| is of the format '.../values-<locale>/<name>.xml', return
+    the value of <locale> (an Android locale qualifier). Otherwise return None.
+  """
+  if not file_path.endswith('.xml'):
+    return None
+  prefix = 'values-'
+  dir_name = os.path.basename(os.path.dirname(file_path))
+  if not dir_name.startswith(prefix):
+    return None
+  qualifier = dir_name[len(prefix):]
+  return qualifier if IsAndroidLocaleQualifier(qualifier) else None
+
+
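+# For example (illustrative):
+#   FindLocaleInStringResourceFilePath('res/values-fr/strings.xml') == 'fr'
+#   FindLocaleInStringResourceFilePath('res/values/strings.xml') is None
+
+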
+def ToAndroidLocaleList(locale_list):
+  """Convert a list of Chromium locales into the corresponding Android list."""
+  return sorted(ToAndroidLocaleName(locale) for locale in locale_list)
+
+# Represents a line from a R.txt file.
+_TextSymbolEntry = collections.namedtuple('RTextEntry',
+    ('java_type', 'resource_type', 'name', 'value'))
+
+
+def _GenerateGlobs(pattern):
+  # This function processes the aapt ignore assets pattern into a list of globs
+  # to be used to exclude files via build_utils.MatchesGlob. It removes the
+  # '!', which aapt uses to mean 'not chatty' (i.e. do not log when a file is
+  # ignored); we don't log anyway, so it is not needed. This function does not
+  # handle the <dir> and <file> prefixes used by aapt, which are assumed not to
+  # be included in the pattern string.
+  return pattern.replace('!', '').split(':')
+
+
+def DeduceResourceDirsFromFileList(resource_files):
+  """Return a list of resource directories from a list of resource files."""
+  # Directory list order is important; we cannot use a set or other data
+  # structures that change order. This is because resource files of the same
+  # name in multiple res/ directories elide one another (the last one passed
+  # is used). Thus the order must be maintained to prevent non-deterministic
+  # and possibly flaky builds.
+  resource_dirs = []
+  for resource_path in resource_files:
+    # Resources are always 1 directory deep under res/.
+    res_dir = os.path.dirname(os.path.dirname(resource_path))
+    if res_dir not in resource_dirs:
+      resource_dirs.append(res_dir)
+
+  # Check if any resource_dirs are children of other ones. This indicates that a
+  # file was listed that is not exactly 1 directory deep under res/.
+  # E.g.:
+  # sources = ["java/res/values/foo.xml", "java/res/README.md"]
+  # ^^ This will cause "java" to be detected as resource directory.
+  for a, b in itertools.permutations(resource_dirs, 2):
+    if not os.path.relpath(a, b).startswith('..'):
+      bad_sources = (s for s in resource_files
+                     if os.path.dirname(os.path.dirname(s)) == b)
+      msg = """\
+Resource(s) found that are not in a proper directory structure:
+  {}
+All resource files must follow a structure of "$ROOT/$SUBDIR/$FILE"."""
+      raise Exception(msg.format('\n  '.join(bad_sources)))
+
+  return resource_dirs
+
+
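+# For example (illustrative):
+#   DeduceResourceDirsFromFileList(
+#       ['java/res/values/strings.xml', 'java/res/drawable/icon.png'])
+#   returns ['java/res'].
+
+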
+def IterResourceFilesInDirectories(directories,
+                                   ignore_pattern=AAPT_IGNORE_PATTERN):
+  globs = _GenerateGlobs(ignore_pattern)
+  for d in directories:
+    for root, _, files in os.walk(d):
+      for f in files:
+        archive_path = f
+        parent_dir = os.path.relpath(root, d)
+        if parent_dir != '.':
+          archive_path = os.path.join(parent_dir, f)
+        path = os.path.join(root, f)
+        if build_utils.MatchesGlob(archive_path, globs):
+          continue
+        yield path, archive_path
+
+
+class ResourceInfoFile(object):
+  """Helper for building up .res.info files."""
+
+  def __init__(self):
+    # Dict of archive_path -> source_path for the current target.
+    self._entries = {}
+    # List of (old_archive_path, new_archive_path) tuples.
+    self._renames = []
+    # We don't currently support using both AddMapping and MergeInfoFile.
+    self._add_mapping_was_called = False
+
+  def AddMapping(self, archive_path, source_path):
+    """Adds a single |archive_path| -> |source_path| entry."""
+    self._add_mapping_was_called = True
+    # "values/" files do not end up in the apk except through resources.arsc.
+    if archive_path.startswith('values'):
+      return
+    source_path = os.path.normpath(source_path)
+    new_value = self._entries.setdefault(archive_path, source_path)
+    if new_value != source_path:
+      raise Exception('Duplicate AddMapping for "{}". old={} new={}'.format(
+          archive_path, new_value, source_path))
+
+  def RegisterRename(self, old_archive_path, new_archive_path):
+    """Records an archive_path rename.
+
+    |old_archive_path| does not need to currently exist in the mappings. Renames
+    are buffered and replayed only when Write() is called.
+    """
+    if not old_archive_path.startswith('values'):
+      self._renames.append((old_archive_path, new_archive_path))
+
+  def MergeInfoFile(self, info_file_path):
+    """Merges the mappings from |info_file_path| into this object.
+
+    Any existing entries are overridden.
+    """
+    assert not self._add_mapping_was_called
+    # Allows clobbering, which is used when overriding resources.
+    with open(info_file_path) as f:
+      self._entries.update(l.rstrip().split('\t') for l in f)
+
+  def _ApplyRenames(self):
+    applied_renames = set()
+    ret = self._entries
+    for rename_tup in self._renames:
+      # Duplicate entries happen for resource overrides.
+      # Use a "seen" set to ensure we still error out if multiple renames
+      # happen for the same old_archive_path with different new_archive_paths.
+      if rename_tup in applied_renames:
+        continue
+      applied_renames.add(rename_tup)
+      old_archive_path, new_archive_path = rename_tup
+      ret[new_archive_path] = ret[old_archive_path]
+      del ret[old_archive_path]
+
+    self._entries = None
+    self._renames = None
+    return ret
+
+  def Write(self, info_file_path):
+    """Applies renames and writes out the file.
+
+    No other methods may be called after this.
+    """
+    entries = self._ApplyRenames()
+    lines = []
+    for archive_path, source_path in entries.items():
+      lines.append('{}\t{}\n'.format(archive_path, source_path))
+    with open(info_file_path, 'w') as info_file:
+      info_file.writelines(sorted(lines))
+
+
+def _ParseTextSymbolsFile(path, fix_package_ids=False):
+  """Given an R.txt file, returns a list of _TextSymbolEntry.
+
+  Args:
+    path: Input file path.
+    fix_package_ids: if True, 0x00 and 0x02 package IDs read from the file
+      will be fixed to 0x7f.
+  Returns:
+    A list of _TextSymbolEntry instances.
+  Raises:
+    Exception: An unexpected line was detected in the input.
+  """
+  ret = []
+  with open(path) as f:
+    for line in f:
+      m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+      if not m:
+        raise Exception('Unexpected line in R.txt: %s' % line)
+      java_type, resource_type, name, value = m.groups()
+      if fix_package_ids:
+        value = _FixPackageIds(value)
+      ret.append(_TextSymbolEntry(java_type, resource_type, name, value))
+  return ret
+
+
+def _FixPackageIds(resource_value):
+  # Resource IDs for resources belonging to regular APKs have their first byte
+  # as 0x7f (the package id). However, webview is not a regular apk but is
+  # used as a shared library, so aapt is passed the --shared-resources flag,
+  # which changes some of the package ids to 0x00 or 0x02. This function
+  # normalises such package ids to 0x7f, which the generated code in R.java
+  # rewrites to the correct package id at runtime. |resource_value| is a
+  # string holding either a single value ('0x12345678') or an array of values
+  # ('{ 0xfedcba98, 0x01234567, 0x56789abc }').
+  return resource_value.replace('0x00', '0x7f').replace('0x02', '0x7f')
+
+
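+# For example (illustrative): _FixPackageIds('0x00010203') == '0x7f010203',
+# and _FixPackageIds('{ 0x00010001, 0x7f020002 }') ==
+# '{ 0x7f010001, 0x7f020002 }'.
+
+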
+def _GetRTxtResourceNames(r_txt_path):
+  """Parse an R.txt file and extract the set of resource names from it."""
+  return {entry.name for entry in _ParseTextSymbolsFile(r_txt_path)}
+
+
+def GetRTxtStringResourceNames(r_txt_path):
+  """Parse an R.txt file and the list of its string resource names."""
+  return sorted({
+      entry.name
+      for entry in _ParseTextSymbolsFile(r_txt_path)
+      if entry.resource_type == 'string'
+  })
+
+
+def GenerateStringResourcesAllowList(module_r_txt_path, allowlist_r_txt_path):
+  """Generate a allowlist of string resource IDs.
+
+  Args:
+    module_r_txt_path: Input base module R.txt path.
+    allowlist_r_txt_path: Input allowlist R.txt path.
+  Returns:
+    A dictionary mapping numerical resource IDs to the corresponding
+    string resource names. The ID values are taken from string resources in
+    |module_r_txt_path| that are also listed by name in |allowlist_r_txt_path|.
+  """
+  allowlisted_names = {
+      entry.name
+      for entry in _ParseTextSymbolsFile(allowlist_r_txt_path)
+      if entry.resource_type == 'string'
+  }
+  return {
+      int(entry.value, 0): entry.name
+      for entry in _ParseTextSymbolsFile(module_r_txt_path)
+      if entry.resource_type == 'string' and entry.name in allowlisted_names
+  }
+
+
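+# Sketch of the returned mapping (illustrative names and values): if both
+# R.txt files contain string resources named 'app_name' and 'cancel', the
+# result might be:
+#   { 0x7f0a0001: 'app_name', 0x7f0a0002: 'cancel' }
+
+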
+class RJavaBuildOptions:
+  """A class used to model the various ways to build an R.java file.
+
+  This is used to control which resource ID variables will be final or
+  non-final, and whether an onResourcesLoaded() method will be generated
+  to adjust the non-final ones, when the corresponding library is loaded
+  at runtime.
+
+  Note that by default, all resources are final, and there is no
+  method generated, which corresponds to calling ExportNoResources().
+  """
+  def __init__(self):
+    self.has_constant_ids = True
+    self.resources_allowlist = None
+    self.has_on_resources_loaded = False
+    self.export_const_styleable = False
+    self.final_package_id = None
+    self.fake_on_resources_loaded = False
+
+  def ExportNoResources(self):
+    """Make all resource IDs final, and don't generate a method."""
+    self.has_constant_ids = True
+    self.resources_allowlist = None
+    self.has_on_resources_loaded = False
+    self.export_const_styleable = False
+
+  def ExportAllResources(self):
+    """Make all resource IDs non-final in the R.java file."""
+    self.has_constant_ids = False
+    self.resources_allowlist = None
+
+  def ExportSomeResources(self, r_txt_file_path):
+    """Only select specific resource IDs to be non-final.
+
+    Args:
+      r_txt_file_path: The path to an R.txt file. All resources named
+        in it will be non-final in the generated R.java file; all others
+        will be final.
+    """
+    self.has_constant_ids = True
+    self.resources_allowlist = _GetRTxtResourceNames(r_txt_file_path)
+
+  def ExportAllStyleables(self):
+    """Make all styleable constants non-final, even non-resources ones.
+
+    Resources that are styleable but not of int[] type are not actually
+    resource IDs but constants. By default they are always final. Call this
+    method to make them non-final anyway in the final R.java file.
+    """
+    self.export_const_styleable = True
+
+  def GenerateOnResourcesLoaded(self, fake=False):
+    """Generate an onResourcesLoaded() method.
+
+    This Java method will be called at runtime by the framework when
+    the corresponding library (which includes the R.java source file)
+    will be loaded at runtime. This corresponds to the --shared-resources
+    or --app-as-shared-lib flags of 'aapt package'.
+
+    If |fake|, then the method will be empty-bodied to compile faster. This is
+    useful for dummy R.java files that will eventually be replaced by real
+    ones.
+    """
+    self.has_on_resources_loaded = True
+    self.fake_on_resources_loaded = fake
+
+  def SetFinalPackageId(self, package_id):
+    """Sets a package ID to be used for resources marked final."""
+    self.final_package_id = package_id
+
+  def _MaybeRewriteRTxtPackageIds(self, r_txt_path):
+    """Rewrites package IDs in the R.txt file if necessary.
+
+    If SetFinalPackageId() was called, some of the resource IDs may have had
+    their package ID changed. This function rewrites the R.txt file to match
+    those changes.
+    """
+    if self.final_package_id is None:
+      return
+
+    entries = _ParseTextSymbolsFile(r_txt_path)
+    with open(r_txt_path, 'w') as f:
+      for entry in entries:
+        value = entry.value
+        if self._IsResourceFinal(entry):
+          value = re.sub(r'0x(?:00|7f)',
+                         '0x{:02x}'.format(self.final_package_id), value)
+        f.write('{} {} {} {}\n'.format(entry.java_type, entry.resource_type,
+                                       entry.name, value))
+
+  def _IsResourceFinal(self, entry):
+    """Determines whether a resource should be final or not.
+
+    Args:
+      entry: A _TextSymbolEntry instance.
+    Returns:
+      True iff the corresponding entry should be final.
+    """
+    if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
+      # A styleable constant may be exported as non-final after all.
+      return not self.export_const_styleable
+    elif not self.has_constant_ids:
+      # Every resource is non-final
+      return False
+    elif not self.resources_allowlist:
+      # No allowlist means all IDs are final.
+      return True
+    else:
+      # Otherwise, only resources named in the allowlist are non-final.
+      return entry.name not in self.resources_allowlist
+
+
+def CreateRJavaFiles(srcjar_dir,
+                     package,
+                     main_r_txt_file,
+                     extra_res_packages,
+                     rjava_build_options,
+                     srcjar_out,
+                     custom_root_package_name=None,
+                     grandparent_custom_package_name=None,
+                     extra_main_r_text_files=None,
+                     ignore_mismatched_values=False):
+  """Create all R.java files for a set of packages and R.txt files.
+
+  Args:
+    srcjar_dir: The top-level output directory for the generated files.
+    package: Package name for R java source files which will inherit
+      from the root R java file.
+    main_r_txt_file: The main R.txt file containing the valid values
+      of _all_ resource IDs.
+    extra_res_packages: A list of extra package names.
+    rjava_build_options: An RJavaBuildOptions instance that controls how
+      exactly the R.java file is generated.
+    srcjar_out: Path of desired output srcjar.
+    custom_root_package_name: Custom package name for module root R.java file,
+      (eg. vr for gen.vr package).
+    grandparent_custom_package_name: Custom root package name for the root
+      R.java file to inherit from. DFM root R.java files will have "base"
+      as the grandparent_custom_package_name. The format of this package name
+      is identical to custom_root_package_name.
+      (eg. for vr grandparent_custom_package_name would be "base")
+    extra_main_r_text_files: R.txt files to be added to the root R.java file.
+    ignore_mismatched_values: If True, ignores if a resource appears multiple
+      times with different entry values (useful when all the values are
+      dummy anyway).
+  Raises:
+    Exception if a package name appears several times in |extra_res_packages|
+  """
+  rjava_build_options._MaybeRewriteRTxtPackageIds(main_r_txt_file)
+
+  packages = list(extra_res_packages)
+
+  if package and package not in packages:
+    # Sometimes, an apk target and a resources target share the same
+    # AndroidManifest.xml and thus |package| will already be in |packages|.
+    packages.append(package)
+
+  # Map of (resource_type, name) -> Entry.
+  # Contains the correct values for resources.
+  all_resources = {}
+  all_resources_by_type = collections.defaultdict(list)
+
+  main_r_text_files = [main_r_txt_file]
+  if extra_main_r_text_files:
+    main_r_text_files.extend(extra_main_r_text_files)
+  for r_txt_file in main_r_text_files:
+    for entry in _ParseTextSymbolsFile(r_txt_file, fix_package_ids=True):
+      entry_key = (entry.resource_type, entry.name)
+      if entry_key in all_resources:
+        if not ignore_mismatched_values:
+          assert entry == all_resources[entry_key], (
+              'Input R.txt %s provided a duplicate resource with a different '
+              'entry value. Got %s, expected %s.' %
+              (r_txt_file, entry, all_resources[entry_key]))
+      else:
+        all_resources[entry_key] = entry
+        all_resources_by_type[entry.resource_type].append(entry)
+        assert entry.resource_type in _ALL_RESOURCE_TYPES, (
+            'Unknown resource type: %s, add to _ALL_RESOURCE_TYPES!' %
+            entry.resource_type)
+
+  if custom_root_package_name:
+    # Custom package name is available, thus use it for root_r_java_package.
+    root_r_java_package = GetCustomPackagePath(custom_root_package_name)
+  else:
+    # Create a unique name using srcjar_out. Underscores are added to ensure
+    # no reserved keywords are used for directory names.
+    root_r_java_package = re.sub(r'[^\w\.]', '', srcjar_out.replace('/', '._'))
+
+  root_r_java_dir = os.path.join(srcjar_dir, *root_r_java_package.split('.'))
+  build_utils.MakeDirectory(root_r_java_dir)
+  root_r_java_path = os.path.join(root_r_java_dir, 'R.java')
+  root_java_file_contents = _RenderRootRJavaSource(
+      root_r_java_package, all_resources_by_type, rjava_build_options,
+      grandparent_custom_package_name)
+  with open(root_r_java_path, 'w') as f:
+    f.write(root_java_file_contents)
+
+  for package in packages:
+    _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
+                           rjava_build_options)
+
+
+def _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
+                           rjava_build_options):
+  """Generates an R.java source file."""
+  package_r_java_dir = os.path.join(srcjar_dir, *package.split('.'))
+  build_utils.MakeDirectory(package_r_java_dir)
+  package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+  java_file_contents = _RenderRJavaSource(package, root_r_java_package,
+                                          rjava_build_options)
+  with open(package_r_java_path, 'w') as f:
+    f.write(java_file_contents)
+
+
+# Resource IDs inside resource arrays are sorted. Application resource IDs start
+# with 0x7f but system resource IDs start with 0x01 thus system resource ids are
+# always at the start of the array. This function finds the index of the first
+# non system resource id to be used for package ID rewriting (we should not
+# rewrite system resource ids).
+def _GetNonSystemIndex(entry):
+  """Get the index of the first application resource ID within a resource
+  array."""
+  res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value)
+  for i, res_id in enumerate(res_ids):
+    if res_id.startswith('0x7f'):
+      return i
+  return len(res_ids)
+
+
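+# For example (illustrative): for an entry whose value is
+# '{ 0x01010355, 0x7f010001 }', _GetNonSystemIndex returns 1, since the
+# system resource ID at index 0 must not be rewritten.
+
+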
+def _RenderRJavaSource(package, root_r_java_package, rjava_build_options):
+  """Generates the contents of a R.java file."""
+  template = Template(
+      """/* AUTO-GENERATED FILE.  DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resource_types %}
+    public static final class {{ resource_type }} extends
+            {{ root_package }}.R.{{ resource_type }} {}
+    {% endfor %}
+    {% if has_on_resources_loaded %}
+    public static void onResourcesLoaded(int packageId) {
+        {{ root_package }}.R.onResourcesLoaded(packageId);
+    }
+    {% endif %}
+}
+""",
+      trim_blocks=True,
+      lstrip_blocks=True)
+
+  return template.render(
+      package=package,
+      resource_types=sorted(_ALL_RESOURCE_TYPES),
+      root_package=root_r_java_package,
+      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded)
+
+
+def GetCustomPackagePath(package_name):
+  return 'gen.' + package_name + '_module'
+
+
+def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options,
+                           grandparent_custom_package_name):
+  """Render an R.java source file. See _CreateRJaveSourceFile for args info."""
+  final_resources_by_type = collections.defaultdict(list)
+  non_final_resources_by_type = collections.defaultdict(list)
+  for res_type, resources in all_resources_by_type.items():
+    for entry in resources:
+      # Entries in styleable that are not int[] are not actually resource ids
+      # but constants.
+      if rjava_build_options._IsResourceFinal(entry):
+        final_resources_by_type[res_type].append(entry)
+      else:
+        non_final_resources_by_type[res_type].append(entry)
+
+  # Keep these assignments all on one line to make diffing against regular
+  # aapt-generated files easier.
+  create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;')
+  create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^='
+                   ' packageIdTransform;')
+  for_loop_condition = ('int i = {{ startIndex(e) }}; i < '
+                        '{{ e.resource_type }}.{{ e.name }}.length; ++i')
+
+  # Here we diverge from what aapt does. Because we have so many
+  # resources, the onResourcesLoaded method was exceeding the 64KB limit that
+  # Java imposes. For this reason we split onResourcesLoaded into different
+  # methods for each resource type.
+  extends_string = ''
+  dep_path = ''
+  if grandparent_custom_package_name:
+    extends_string = 'extends {{ parent_path }}.R.{{ resource_type }} '
+    dep_path = GetCustomPackagePath(grandparent_custom_package_name)
+
+  template = Template("""/* AUTO-GENERATED FILE.  DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resource_types %}
+    public static class {{ resource_type }} """ + extends_string + """ {
+        {% for e in final_resources[resource_type] %}
+        public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
+        {% endfor %}
+        {% for e in non_final_resources[resource_type] %}
+            {% if e.value != '0' %}
+        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+            {% else %}
+        public static {{ e.java_type }} {{ e.name }};
+            {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+    {% if has_on_resources_loaded %}
+      {% if fake_on_resources_loaded %}
+    public static void onResourcesLoaded(int packageId) {
+    }
+      {% else %}
+    private static boolean sResourcesDidLoad;
+    public static void onResourcesLoaded(int packageId) {
+        if (sResourcesDidLoad) {
+            return;
+        }
+        sResourcesDidLoad = true;
+        int packageIdTransform = (packageId ^ 0x7f) << 24;
+        {% for resource_type in resource_types %}
+        onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
+        {% for e in non_final_resources[resource_type] %}
+        {% if e.java_type == 'int[]' %}
+        for(""" + for_loop_condition + """) {
+            """ + create_id_arr + """
+        }
+        {% endif %}
+        {% endfor %}
+        {% endfor %}
+    }
+    {% for res_type in resource_types %}
+    private static void onResourcesLoaded{{ res_type|title }} (
+            int packageIdTransform) {
+        {% for e in non_final_resources[res_type] %}
+        {% if res_type != 'styleable' and e.java_type != 'int[]' %}
+        """ + create_id + """
+        {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+      {% endif %}
+    {% endif %}
+}
+""",
+                      trim_blocks=True,
+                      lstrip_blocks=True)
+  return template.render(
+      package=package,
+      resource_types=sorted(_ALL_RESOURCE_TYPES),
+      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded,
+      fake_on_resources_loaded=rjava_build_options.fake_on_resources_loaded,
+      final_resources=final_resources_by_type,
+      non_final_resources=non_final_resources_by_type,
+      startIndex=_GetNonSystemIndex,
+      parent_path=dep_path)
+
+
+def ExtractBinaryManifestValues(aapt2_path, apk_path):
+  """Returns (version_code, version_name, package_name) for the given apk."""
+  output = subprocess.check_output([
+      aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml'
+  ]).decode('utf-8')
+  version_code = re.search(r'versionCode.*?=(\d*)', output).group(1)
+  version_name = re.search(r'versionName.*?="(.*?)"', output).group(1)
+  package_name = re.search(r'package.*?="(.*?)"', output).group(1)
+  return version_code, version_name, package_name
+
+
+def ExtractArscPackage(aapt2_path, apk_path):
+  """Returns (package_name, package_id) of resources.arsc from apk_path."""
+  proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path],
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE)
+  for line in proc.stdout:
+    line = line.decode('utf-8')
+    # Package name=org.chromium.webview_shell id=7f
+    if line.startswith('Package'):
+      proc.kill()
+      parts = line.split()
+      package_name = parts[1].split('=')[1]
+      package_id = parts[2][3:]
+      return package_name, int(package_id, 16)
+
+  # aapt2 currently crashes when dumping webview resources, but not until after
+  # it prints the "Package" line (b/130553900).
+  sys.stderr.write(proc.stderr.read().decode('utf-8'))
+  raise Exception('Failed to find arsc package name')
+
+
+def _RenameSubdirsWithPrefix(dir_path, prefix):
+  subdirs = [
+      d for d in os.listdir(dir_path)
+      if os.path.isdir(os.path.join(dir_path, d))
+  ]
+  renamed_subdirs = []
+  for d in subdirs:
+    old_path = os.path.join(dir_path, d)
+    new_path = os.path.join(dir_path, '{}_{}'.format(prefix, d))
+    renamed_subdirs.append(new_path)
+    os.rename(old_path, new_path)
+  return renamed_subdirs
+
+
+def _HasMultipleResDirs(zip_path):
+  """Checks for magic comment set by prepare_resources.py
+
+  Returns: True iff the zipfile has the magic comment that means it contains
+  multiple res/ dirs inside instead of just contents of a single res/ dir
+  (without a wrapping res/).
+  """
+  with zipfile.ZipFile(zip_path) as z:
+    return z.comment == MULTIPLE_RES_MAGIC_STRING
+
+
+def ExtractDeps(dep_zips, deps_dir):
+  """Extract a list of resource dependency zip files.
+
+  Args:
+    dep_zips: A list of zip file paths; each one will be extracted to
+      a subdirectory of |deps_dir|, named after the zip file's path (e.g.
+      '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/').
+    deps_dir: Top-level extraction directory.
+  Returns:
+    The list of all sub-directory paths, relative to |deps_dir|.
+  Raises:
+    Exception: If a sub-directory already exists with the same name before
+      extraction.
+  """
+  dep_subdirs = []
+  for z in dep_zips:
+    subdirname = z.replace(os.path.sep, '_')
+    subdir = os.path.join(deps_dir, subdirname)
+    if os.path.exists(subdir):
+      raise Exception('Resource zip name conflict: ' + subdirname)
+    build_utils.ExtractAll(z, path=subdir)
+    if _HasMultipleResDirs(z):
+      # The basename of the directory is used to create a zip during resource
+      # compilation; include the path in the basename to help attribute errors
+      # to the correct target. For example, directory 0_res may be renamed
+      # chrome_android_chrome_app_java_resources_0_res, reflecting the name
+      # and path of the android_resources target it came from.
+      subdir_subdirs = _RenameSubdirsWithPrefix(subdir, subdirname)
+      dep_subdirs.extend(subdir_subdirs)
+    else:
+      dep_subdirs.append(subdir)
+  return dep_subdirs
+
+
+class _ResourceBuildContext(object):
+  """A temporary directory for packaging and compiling Android resources.
+
+  Args:
+    temp_dir: Optional root build directory path. If None, a temporary
+      directory will be created, and removed in Close().
+    keep_files: If True, temporary files are not removed in Close().
+  """
+
+  def __init__(self, temp_dir=None, keep_files=False):
+    """Initialized the context."""
+    # The top-level temporary directory.
+    if temp_dir:
+      self.temp_dir = temp_dir
+      os.makedirs(temp_dir)
+    else:
+      self.temp_dir = tempfile.mkdtemp()
+    self.remove_on_exit = not keep_files
+
+    # A location to store resources extracted from dependency zip files.
+    self.deps_dir = os.path.join(self.temp_dir, 'deps')
+    os.mkdir(self.deps_dir)
+    # A location to place aapt-generated files.
+    self.gen_dir = os.path.join(self.temp_dir, 'gen')
+    os.mkdir(self.gen_dir)
+    # A location to place generated R.java files.
+    self.srcjar_dir = os.path.join(self.temp_dir, 'java')
+    os.mkdir(self.srcjar_dir)
+    # Temporary file locations.
+    self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
+    self.srcjar_path = os.path.join(self.temp_dir, 'R.srcjar')
+    self.info_path = os.path.join(self.temp_dir, 'size.info')
+    self.stable_ids_path = os.path.join(self.temp_dir, 'in_ids.txt')
+    self.emit_ids_path = os.path.join(self.temp_dir, 'out_ids.txt')
+    self.proguard_path = os.path.join(self.temp_dir, 'keeps.flags')
+    self.proguard_main_dex_path = os.path.join(self.temp_dir, 'maindex.flags')
+    self.arsc_path = os.path.join(self.temp_dir, 'out.ap_')
+    self.proto_path = os.path.join(self.temp_dir, 'out.proto.ap_')
+    self.optimized_arsc_path = os.path.join(self.temp_dir, 'out.opt.ap_')
+    self.optimized_proto_path = os.path.join(self.temp_dir, 'out.opt.proto.ap_')
+
+  def Close(self):
+    """Close the context and destroy all temporary files."""
+    if self.remove_on_exit:
+      shutil.rmtree(self.temp_dir)
+
+
+@contextlib.contextmanager
+def BuildContext(temp_dir=None, keep_files=False):
+  """Generator for a _ResourceBuildContext instance."""
+  context = None
+  try:
+    context = _ResourceBuildContext(temp_dir, keep_files)
+    yield context
+  finally:
+    if context:
+      context.Close()
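+
+
+# A minimal usage sketch (illustrative only): all intermediate paths hang off
+# the context's temp_dir, which is removed on exit unless keep_files=True.
+#
+#   with BuildContext() as ctx:
+#     build_utils.ExtractAll(dep_zip, path=ctx.deps_dir)  # dep_zip: any input
+#     ...  # run aapt2, producing ctx.r_txt_path, ctx.srcjar_path, etc.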
+
+
+def ResourceArgsParser():
+  """Create an argparse.ArgumentParser instance with common argument groups.
+
+  Returns:
+    A tuple of (parser, in_group, out_group) corresponding to the parser
+    instance, and the input and output argument groups for it, respectively.
+  """
+  parser = argparse.ArgumentParser(description=__doc__)
+
+  input_opts = parser.add_argument_group('Input options')
+  output_opts = parser.add_argument_group('Output options')
+
+  build_utils.AddDepfileOption(output_opts)
+
+  input_opts.add_argument('--include-resources', required=True, action="append",
+                        help='Paths to arsc resource files used to link '
+                             'against. Can be specified multiple times.')
+
+  input_opts.add_argument('--dependencies-res-zips', required=True,
+                    help='Resources zip archives from dependents. Required to '
+                         'resolve @type/foo references into dependent '
+                         'libraries.')
+
+  input_opts.add_argument(
+      '--r-text-in',
+       help='Path to pre-existing R.txt. Its resource IDs override those found '
+            'in the aapt-generated R.txt when generating R.java.')
+
+  input_opts.add_argument(
+      '--extra-res-packages',
+      help='Additional package names to generate R.java files for.')
+
+  return (parser, input_opts, output_opts)
+
+
+def HandleCommonOptions(options):
+  """Handle common command-line options after parsing.
+
+  Args:
+    options: the result of parse_args() on the parser returned by
+        ResourceArgsParser(). This function updates a few common fields.
+  """
+  options.include_resources = [build_utils.ParseGnList(r) for r in
+                               options.include_resources]
+  # Flatten list of include resources list to make it easier to use.
+  options.include_resources = [r for resources in options.include_resources
+                               for r in resources]
+
+  options.dependencies_res_zips = (
+      build_utils.ParseGnList(options.dependencies_res_zips))
+
+  # Don't use [] as the default value since some scripts explicitly pass "".
+  if options.extra_res_packages:
+    options.extra_res_packages = (
+        build_utils.ParseGnList(options.extra_res_packages))
+  else:
+    options.extra_res_packages = []
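+
+
+# Typical wiring of the two helpers above (a sketch; the '--my-output' flag is
+# hypothetical and added only for illustration):
+#
+#   parser, input_opts, output_opts = ResourceArgsParser()
+#   output_opts.add_argument('--my-output', help='Hypothetical output path.')
+#   options = parser.parse_args(argv)
+#   HandleCommonOptions(options)
+#   # options.include_resources and options.dependencies_res_zips are now
+#   # flat Python lists rather than GN-list strings.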
+
+
+def ParseAndroidResourceStringsFromXml(xml_data):
+  """Parse and Android xml resource file and extract strings from it.
+
+  Args:
+    xml_data: XML file data.
+  Returns:
+    A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8
+    encoded value, and |namespaces| is a dictionary mapping prefixes to URLs
+    corresponding to namespaces declared in the <resources> element.
+  """
+  # NOTE: This uses regular expression matching because parsing with something
+  # like ElementTree makes it tedious to properly parse some of the structured
+  # text found in string resources, e.g.:
+  #      <string msgid="3300176832234831527" \
+  #         name="abc_shareactionprovider_share_with_application">\
+  #             "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\
+  #      </string>
+  result = {}
+
+  # Find <resources> start tag and extract namespaces from it.
+  m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE)
+  if not m:
+    raise Exception('<resources> start tag expected: ' + xml_data)
+  input_data = xml_data[m.end():]
+  resource_attrs = m.group(1)
+  re_namespace = re.compile(r'\s*(xmlns:(\w+)="([^"]+)")')
+  namespaces = {}
+  while resource_attrs:
+    m = re_namespace.match(resource_attrs)
+    if not m:
+      break
+    namespaces[m.group(2)] = m.group(3)
+    resource_attrs = resource_attrs[m.end(1):]
+
+  # Find each string element now.
+  re_string_element_start = re.compile('<string ([^>]* )?name="([^">]+)"[^>]*>')
+  re_string_element_end = re.compile('</string>')
+  while input_data:
+    m = re_string_element_start.search(input_data)
+    if not m:
+      break
+    name = m.group(2)
+    input_data = input_data[m.end():]
+    m2 = re_string_element_end.search(input_data)
+    if not m2:
+      raise Exception('Expected closing string tag: ' + input_data)
+    text = input_data[:m2.start()]
+    input_data = input_data[m2.end():]
+    if text and text[0] == '"' and text[-1] == '"':
+      text = text[1:-1]
+    result[name] = text
+
+  return result, namespaces
+
+
+def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None):
+  """Generate an XML text corresponding to an Android resource strings map.
+
+  Args:
+    names_to_text: A dictionary mapping resource names to localized
+      text (encoded as UTF-8).
+    namespaces: A map of namespace prefix to URL.
+  Returns:
+    New non-Unicode string containing an XML data structure describing the
+    input as an Android resource .xml file.
+  """
+  result = '<?xml version="1.0" encoding="utf-8"?>\n'
+  result += '<resources'
+  if namespaces:
+    for prefix, url in sorted(namespaces.items()):
+      result += ' xmlns:%s="%s"' % (prefix, url)
+  result += '>\n'
+  if not names_to_utf8_text:
+    result += '<!-- this file intentionally empty -->\n'
+  else:
+    for name, utf8_text in sorted(names_to_utf8_text.items()):
+      result += '<string name="%s">"%s"</string>\n' % (name, utf8_text)
+  result += '</resources>\n'
+  return result.encode('utf8')
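+
+
+# Example round-trip (illustrative): the generated bytes parse back to the
+# same map via ParseAndroidResourceStringsFromXml().
+#
+#   ns = {'android': 'http://schemas.android.com/apk/res/android'}
+#   xml = GenerateAndroidResourceStringsXml({'hello': 'Bonjour'}, ns)
+#   strings, namespaces = ParseAndroidResourceStringsFromXml(
+#       xml.decode('utf8'))
+#   assert strings == {'hello': 'Bonjour'} and namespaces == ns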
+
+
+def FilterAndroidResourceStringsXml(xml_file_path, string_predicate):
+  """Remove unwanted localized strings from an Android resource .xml file.
+
+  This function takes a |string_predicate| callable object that will
+  receive a resource string name, and should return True iff the
+  corresponding <string> element should be kept in the file.
+
+  Args:
+    xml_file_path: Android resource strings xml file path.
+    string_predicate: A predicate function which will receive the string name
+      and shall return True iff the corresponding <string> element should be
+      kept.
+  """
+  with open(xml_file_path) as f:
+    xml_data = f.read()
+  strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data)
+
+  string_deletion = False
+  for name in list(strings_map.keys()):
+    if not string_predicate(name):
+      del strings_map[name]
+      string_deletion = True
+
+  if string_deletion:
+    new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces)
+    with open(xml_file_path, 'wb') as f:
+      f.write(new_xml_data)
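+
+
+# Example (illustrative): keep only the strings named in an allowlist.
+#
+#   allowlist = {'low_memory_error', 'opening_file_error'}  # hypothetical
+#   FilterAndroidResourceStringsXml('res/values-et/strings.xml',
+#                                   lambda name: name in allowlist)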
diff --git a/src/build/android/gyp/util/resource_utils_test.py b/src/build/android/gyp/util/resource_utils_test.py
new file mode 100755
index 0000000..62d5b43
--- /dev/null
+++ b/src/build/android/gyp/util/resource_utils_test.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# coding: utf-8
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+# Required because the following import needs build/android/gyp in the
+# Python path to import util.build_utils.
+_BUILD_ANDROID_GYP_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT)
+
+import resource_utils  # pylint: disable=relative-import
+
+# pylint: disable=line-too-long
+
+_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string>
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="opening_file_error">"Valit. faili avamine ebaõnnestus"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+# pylint: enable=line-too-long
+
+_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+<!-- this file intentionally empty -->
+</resources>
+'''
+
+_TEST_RESOURCES_MAP_1 = {
+    'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada',
+    'opening_file_error': 'Valit. faili avamine ebaõnnestus',
+    'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus',
+    'structured_text': 'This is <android:g id="STRUCTURED_TEXT">%s</android:g>',
+}
+
+_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'}
+
+_TEST_RESOURCES_ALLOWLIST_1 = ['low_memory_error', 'structured_text']
+
+# Extracted from one generated Chromium R.txt file, with string resource
+# names shuffled randomly.
+_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000
+int anim abc_fade_out 0x7f050001
+int anim abc_grow_fade_in_from_bottom 0x7f050002
+int array DefaultCookiesSettingEntries 0x7f120002
+int array DefaultCookiesSettingValues 0x7f120003
+int array DefaultGeolocationSettingEntries 0x7f120004
+int attr actionBarDivider 0x7f0100e7
+int attr actionBarStyle 0x7f0100e2
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109
+int string AllowedDomainsForAppsTitle 0x7f0c0104
+int string AlternateErrorPagesEnabledTitle 0x7f0c0106
+int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba }
+int styleable SnackbarLayout_android_maxWidth 0
+int styleable SnackbarLayout_elevation 2
+'''
+
+# Test allowlist R.txt file. Note that AlternateErrorPagesEnabledTitle is
+# listed as an 'anim' and should thus be skipped. Similarly the string
+# 'ThisStringDoesNotAppear' should not be in the final result.
+_TEST_ALLOWLIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string ThisStringDoesNotAppear 0x7f0fffff
+'''
+
+_TEST_R_TEXT_RESOURCES_IDS = {
+    0x7f0c0105: 'AllowedDomainsForAppsDesc',
+    0x7f0c0107: 'AlternateErrorPagesEnabledDesc',
+}
+
+# Names of string resources in _TEST_R_TXT, should be sorted!
+_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([
+    'AllowedDomainsForAppsDesc',
+    'AllowedDomainsForAppsTitle',
+    'AlternateErrorPagesEnabledDesc',
+    'AlternateErrorPagesEnabledTitle',
+    'AuthAndroidNegotiateAccountTypeDesc',
+])
+
+
+def _CreateTestFile(tmp_dir, file_name, file_data):
+  file_path = os.path.join(tmp_dir, file_name)
+  with open(file_path, 'wt') as f:
+    f.write(file_data)
+  return file_path
+
+
+class ResourceUtilsTest(unittest.TestCase):
+
+  def test_GetRTxtStringResourceNames(self):
+    with build_utils.TempDir() as tmp_dir:
+      tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+      self.assertListEqual(
+          resource_utils.GetRTxtStringResourceNames(tmp_file),
+          _TEST_R_TXT_STRING_RESOURCE_NAMES)
+
+  def test_GenerateStringResourcesAllowList(self):
+    with build_utils.TempDir() as tmp_dir:
+      tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+      tmp_allowlist_rtxt_file = _CreateTestFile(tmp_dir, "test_allowlist_R.txt",
+                                                _TEST_ALLOWLIST_R_TXT)
+      self.assertDictEqual(
+          resource_utils.GenerateStringResourcesAllowList(
+              tmp_module_rtxt_file, tmp_allowlist_rtxt_file),
+          _TEST_R_TEXT_RESOURCES_IDS)
+
+  def test_IsAndroidLocaleQualifier(self):
+    good_locales = [
+        'en',
+        'en-rUS',
+        'fil',
+        'fil-rPH',
+        'iw',
+        'iw-rIL',
+        'b+en',
+        'b+en+US',
+        'b+ja+Latn',
+        'b+ja+JP+Latn',
+        'b+cmn+Hant-TW',
+    ]
+    bad_locales = [
+        'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+'
+    ]
+    for locale in good_locales:
+      self.assertTrue(
+          resource_utils.IsAndroidLocaleQualifier(locale),
+          msg="'%s' should be a good locale!" % locale)
+
+    for locale in bad_locales:
+      self.assertFalse(
+          resource_utils.IsAndroidLocaleQualifier(locale),
+          msg="'%s' should be a bad locale!" % locale)
+
+  def test_ToAndroidLocaleName(self):
+    _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = {
+        'en': 'en',
+        'en-US': 'en-rUS',
+        'en-FOO': 'en-rFOO',
+        'fil': 'tl',
+        'tl': 'tl',
+        'he': 'iw',
+        'he-IL': 'iw-rIL',
+        'id': 'in',
+        'id-BAR': 'in-rBAR',
+        'nb': 'nb',
+        'yi': 'ji'
+    }
+    for chromium_locale, android_locale in \
+        _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.items():
+      result = resource_utils.ToAndroidLocaleName(chromium_locale)
+      self.assertEqual(result, android_locale)
+
+  def test_ToChromiumLocaleName(self):
+    _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
+        'foo': 'foo',
+        'foo-rBAR': 'foo-BAR',
+        'b+lll': 'lll',
+        'b+ll+Extra': 'll',
+        'b+ll+RR': 'll-RR',
+        'b+lll+RR+Extra': 'lll-RR',
+        'b+ll+RRR+Extra': 'll-RRR',
+        'b+ll+Ssss': 'll-Ssss',
+        'b+ll+Ssss+Extra': 'll-Ssss',
+        'b+ll+Ssss+RR': 'll-Ssss-RR',
+        'b+ll+Ssss+RRR': 'll-Ssss-RRR',
+        'b+ll+Ssss+RRR+Extra': 'll-Ssss-RRR',
+        'b+ll+Whatever': 'll',
+        'en': 'en',
+        'en-rUS': 'en-US',
+        'en-US': None,
+        'en-FOO': None,
+        'en-rFOO': 'en-FOO',
+        'es-rES': 'es-ES',
+        'es-rUS': 'es-419',
+        'tl': 'fil',
+        'fil': 'fil',
+        'iw': 'he',
+        'iw-rIL': 'he-IL',
+        'b+iw+IL': 'he-IL',
+        'in': 'id',
+        'in-rBAR': 'id-BAR',
+        'id-rBAR': 'id-BAR',
+        'nb': 'nb',
+        'no': 'nb',  # http://crbug.com/920960
+    }
+    for android_locale, chromium_locale in \
+        _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.items():
+      result = resource_utils.ToChromiumLocaleName(android_locale)
+      self.assertEqual(result, chromium_locale)
+
+  def test_FindLocaleInStringResourceFilePath(self):
+    self.assertEqual(
+        None,
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values/whatever.xml'))
+    self.assertEqual(
+        'foo',
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo/whatever.xml'))
+    self.assertEqual(
+        'foo-rBAR',
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo-rBAR/whatever.xml'))
+    self.assertEqual(
+        None,
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo/ignore-subdirs/whatever.xml'))
+
+  def test_ParseAndroidResourceStringsFromXml(self):
+    ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml(
+        _TEST_XML_INPUT_1)
+    self.assertDictEqual(ret, _TEST_RESOURCES_MAP_1)
+    self.assertDictEqual(namespaces, _TEST_NAMESPACES_1)
+
+  def test_GenerateAndroidResourceStringsXml(self):
+    # First, an empty strings map with no namespaces.
+    result = resource_utils.GenerateAndroidResourceStringsXml({})
+    self.assertEqual(result.decode('utf8'), _TEST_XML_OUTPUT_EMPTY)
+
+    result = resource_utils.GenerateAndroidResourceStringsXml(
+        _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+    self.assertEqual(result.decode('utf8'), _TEST_XML_INPUT_1)
+
+  @staticmethod
+  def _CreateTestResourceFile(output_dir, locale, string_map, namespaces):
+    values_dir = os.path.join(output_dir, 'values-' + locale)
+    build_utils.MakeDirectory(values_dir)
+    file_path = os.path.join(values_dir, 'strings.xml')
+    with open(file_path, 'wb') as f:
+      file_data = resource_utils.GenerateAndroidResourceStringsXml(
+          string_map, namespaces)
+      f.write(file_data)
+    return file_path
+
+  def _CheckTestResourceFile(self, file_path, expected_data):
+    with open(file_path) as f:
+      file_data = f.read()
+    self.assertEqual(file_data, expected_data)
+
+  def test_FilterAndroidResourceStringsXml(self):
+    with build_utils.TempDir() as tmp_path:
+      test_file = self._CreateTestResourceFile(
+          tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+      resource_utils.FilterAndroidResourceStringsXml(
+          test_file, lambda x: x in _TEST_RESOURCES_ALLOWLIST_1)
+      self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/resources_parser.py b/src/build/android/gyp/util/resources_parser.py
new file mode 100644
index 0000000..8d8d69c
--- /dev/null
+++ b/src/build/android/gyp/util/resources_parser.py
@@ -0,0 +1,142 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import re
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import resource_utils
+
+_TextSymbolEntry = collections.namedtuple(
+    'RTextEntry', ('java_type', 'resource_type', 'name', 'value'))
+
+_DUMMY_RTXT_ID = '0x7f010001'
+_DUMMY_RTXT_INDEX = '1'
+
+
+def _ResourceNameToJavaSymbol(resource_name):
+  return re.sub(r'[.:]', '_', resource_name)
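+
+
+# Illustrative examples (not from the original sources): the helper above maps
+# resource names to valid Java identifiers, e.g.:
+#   _ResourceNameToJavaSymbol('android:textColor') -> 'android_textColor'
+#   _ResourceNameToJavaSymbol('my.res.name')       -> 'my_res_name'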
+
+
+class RTxtGenerator(object):
+  def __init__(self,
+               res_dirs,
+               ignore_pattern=resource_utils.AAPT_IGNORE_PATTERN):
+    self.res_dirs = res_dirs
+    self.ignore_pattern = ignore_pattern
+
+  def _ParseDeclareStyleable(self, node):
+    ret = set()
+    stylable_name = _ResourceNameToJavaSymbol(node.attrib['name'])
+    ret.add(
+        _TextSymbolEntry('int[]', 'styleable', stylable_name,
+                         '{{{}}}'.format(_DUMMY_RTXT_ID)))
+    for child in node:
+      if child.tag == 'eat-comment':
+        continue
+      if child.tag != 'attr':
+        # This parser expects everything inside <declare-styleable/> to be
+        # either an attr or an eat-comment. If new resource xml files are added
+        # that do not conform to this, this parser needs updating.
+        raise Exception('Unexpected tag {} inside <declare-styleable/>'.format(
+            child.tag))
+      entry_name = '{}_{}'.format(
+          stylable_name, _ResourceNameToJavaSymbol(child.attrib['name']))
+      ret.add(
+          _TextSymbolEntry('int', 'styleable', entry_name, _DUMMY_RTXT_INDEX))
+      if not child.attrib['name'].startswith('android:'):
+        resource_name = _ResourceNameToJavaSymbol(child.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'attr', resource_name, _DUMMY_RTXT_ID))
+      for entry in child:
+        if entry.tag not in ('enum', 'flag'):
+          # This parser expects everything inside <attr/> to be either an
+          # <enum/> or a <flag/>. If new resource xml files are added that do
+          # not conform to this, this parser needs updating.
+          raise Exception('Unexpected tag {} inside <attr/>'.format(entry.tag))
+        resource_name = _ResourceNameToJavaSymbol(entry.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    return ret
+
+  def _ExtractNewIdsFromNode(self, node):
+    ret = set()
+    # Sometimes there are @+id/ in random attributes (not just in android:id)
+    # and apparently that is valid. See:
+    # https://developer.android.com/reference/android/widget/RelativeLayout.LayoutParams.html
+    for value in node.attrib.values():
+      if value.startswith('@+id/'):
+        resource_name = value[5:]
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    for child in node:
+      ret.update(self._ExtractNewIdsFromNode(child))
+    return ret
+
+  def _ExtractNewIdsFromXml(self, xml_path):
+    root = ElementTree.parse(xml_path).getroot()
+    return self._ExtractNewIdsFromNode(root)
+
+  def _ParseValuesXml(self, xml_path):
+    ret = set()
+    root = ElementTree.parse(xml_path).getroot()
+    assert root.tag == 'resources'
+    for child in root:
+      if child.tag == 'eat-comment':
+        # eat-comment is just a dummy documentation element.
+        continue
+      if child.tag == 'skip':
+        # skip is just a dummy element.
+        continue
+      if child.tag == 'declare-styleable':
+        ret.update(self._ParseDeclareStyleable(child))
+      else:
+        if child.tag == 'item':
+          resource_type = child.attrib['type']
+        elif child.tag in ('array', 'integer-array', 'string-array'):
+          resource_type = 'array'
+        else:
+          resource_type = child.tag
+        name = _ResourceNameToJavaSymbol(child.attrib['name'])
+        ret.add(_TextSymbolEntry('int', resource_type, name, _DUMMY_RTXT_ID))
+    return ret
+
+  def _CollectResourcesListFromDirectory(self, res_dir):
+    ret = set()
+    globs = resource_utils._GenerateGlobs(self.ignore_pattern)
+    for root, _, files in os.walk(res_dir):
+      resource_type = os.path.basename(root)
+      if '-' in resource_type:
+        resource_type = resource_type[:resource_type.index('-')]
+      for f in files:
+        if build_utils.MatchesGlob(f, globs):
+          continue
+        if resource_type == 'values':
+          ret.update(self._ParseValuesXml(os.path.join(root, f)))
+        else:
+          if '.' in f:
+            resource_name = f[:f.index('.')]
+          else:
+            resource_name = f
+          ret.add(
+              _TextSymbolEntry('int', resource_type, resource_name,
+                               _DUMMY_RTXT_ID))
+          # Types other than layouts can also contain new ids (e.g. menus and
+          # drawables). Just in case, look for new ids in all XML files.
+          if f.endswith('.xml'):
+            ret.update(self._ExtractNewIdsFromXml(os.path.join(root, f)))
+    return ret
+
+  def _CollectResourcesListFromDirectories(self):
+    ret = set()
+    for res_dir in self.res_dirs:
+      ret.update(self._CollectResourcesListFromDirectory(res_dir))
+    return ret
+
+  def WriteRTxtFile(self, rtxt_path):
+    resources = self._CollectResourcesListFromDirectories()
+    with build_utils.AtomicOutput(rtxt_path, mode='w') as f:
+      for resource in resources:
+        line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format(
+            resource)
+        f.write(line)
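+
+
+# Usage sketch (illustrative only; paths are hypothetical):
+#
+#   generator = RTxtGenerator(['some/res'])
+#   generator.WriteRTxtFile('out/gen/R.txt')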
diff --git a/src/build/android/gyp/util/server_utils.py b/src/build/android/gyp/util/server_utils.py
new file mode 100644
index 0000000..e050ef6
--- /dev/null
+++ b/src/build/android/gyp/util/server_utils.py
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import json
+import os
+import socket
+
+# Use a unix abstract domain socket:
+# https://man7.org/linux/man-pages/man7/unix.7.html#:~:text=abstract:
+SOCKET_ADDRESS = '\0chromium_build_server_socket'
+BUILD_SERVER_ENV_VARIABLE = 'INVOKED_BY_BUILD_SERVER'
+
+
+def MaybeRunCommand(name, argv, stamp_file):
+  """Returns True if the command was successfully sent to the build server."""
+
+  # When the build server runs a command, it sets this environment variable.
+  # This prevents infinite recursion where the script sends a request to the
+  # build server, then the build server runs the script, and then the script
+  # sends another request to the build server.
+  if BUILD_SERVER_ENV_VARIABLE in os.environ:
+    return False
+  with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock:
+    try:
+      sock.connect(SOCKET_ADDRESS)
+      sock.sendall(
+          json.dumps({
+              'name': name,
+              'cmd': argv,
+              'cwd': os.getcwd(),
+              'stamp_file': stamp_file,
+          }).encode('utf8'))
+    except socket.error as e:
+      # [Errno 111] Connection refused. Either the server has not been started
+      #             or the server is not currently accepting new connections.
+      if e.errno == 111:
+        return False
+      raise
+  return True
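+
+
+# Usage sketch (illustrative; the name and stamp file are hypothetical):
+# scripts call this before doing any work themselves, and exit early when the
+# build server has accepted the job.
+#
+#   if MaybeRunCommand('write_build_config', sys.argv, 'foo.stamp'):
+#     sys.exit(0)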
diff --git a/src/build/android/gyp/util/zipalign.py b/src/build/android/gyp/util/zipalign.py
new file mode 100644
index 0000000..c5c4ea8
--- /dev/null
+++ b/src/build/android/gyp/util/zipalign.py
@@ -0,0 +1,97 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+
+_FIXED_ZIP_HEADER_LEN = 30
+
+
+def _PatchedDecodeExtra(self):
+  # Try to decode the extra field.
+  extra = self.extra
+  unpack = struct.unpack
+  while len(extra) >= 4:
+    tp, ln = unpack('<HH', extra[:4])
+    if tp == 1:
+      if ln >= 24:
+        counts = unpack('<QQQ', extra[4:28])
+      elif ln == 16:
+        counts = unpack('<QQ', extra[4:20])
+      elif ln == 8:
+        counts = unpack('<Q', extra[4:12])
+      elif ln == 0:
+        counts = ()
+      else:
+        raise RuntimeError("Corrupt extra field %s" % (ln, ))
+
+      idx = 0
+
+      # ZIP64 extension (large files and/or large archives)
+      if self.file_size in (0xffffffffffffffff, 0xffffffff):
+        self.file_size = counts[idx]
+        idx += 1
+
+      if self.compress_size == 0xffffffff:
+        self.compress_size = counts[idx]
+        idx += 1
+
+      if self.header_offset == 0xffffffff:
+        self.header_offset = counts[idx]
+        idx += 1
+
+    extra = extra[ln + 4:]
+
+
+def ApplyZipFileZipAlignFix():
+  """Fix zipfile.ZipFile() to be able to open zipaligned .zip files.
+
+  Android's zip alignment uses not-quite-valid zip headers to perform alignment.
+  Python < 3.4 crashes when trying to load them.
+  https://bugs.python.org/issue14315
+  """
+  if sys.version_info < (3, 4):
+    zipfile.ZipInfo._decodeExtra = (  # pylint: disable=protected-access
+        _PatchedDecodeExtra)
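+
+
+# Usage sketch (illustrative): apply the fix once, before opening any
+# zipaligned archive with the zipfile module on Python < 3.4.
+#
+#   ApplyZipFileZipAlignFix()
+#   with zipfile.ZipFile('aligned.apk') as z:  # 'aligned.apk' is hypothetical
+#     print(z.namelist())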
+
+
+def _SetAlignment(zip_obj, zip_info, alignment):
+  """Sets a ZipInfo's extra field such that the file will be aligned.
+
+  Args:
+    zip_obj: The ZipFile object that is being written.
+    zip_info: The ZipInfo object about to be written.
+    alignment: The amount of alignment (e.g. 4, or 4*1024).
+  """
+  cur_offset = zip_obj.fp.tell()
+  header_size = _FIXED_ZIP_HEADER_LEN + len(zip_info.filename)
+  padding_needed = (alignment - (
+      (cur_offset + header_size) % alignment)) % alignment
+
+  # Python writes |extra| to both the local file header and the central
+  # directory's file header. Android's zipalign tool writes only to the
+  # local file header, so there is more overhead in using python to align.
+  zip_info.extra = b'\0' * padding_needed
+
+
+def AddToZipHermetic(zip_file,
+                     zip_path,
+                     src_path=None,
+                     data=None,
+                     compress=None,
+                     alignment=None):
+  """Same as build_utils.AddToZipHermetic(), but with alignment.
+
+  Args:
+    alignment: If set, align the data of the entry to this many bytes.
+  """
+  zipinfo = build_utils.HermeticZipInfo(filename=zip_path)
+  if alignment:
+    _SetAlignment(zip_file, zipinfo, alignment)
+  build_utils.AddToZipHermetic(
+      zip_file, zipinfo, src_path=src_path, data=data, compress=compress)
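+
+
+# Usage sketch (illustrative; file names are hypothetical): store an
+# uncompressed, page-aligned native library.
+#
+#   with zipfile.ZipFile('out.apk', 'w') as z:
+#     AddToZipHermetic(z, 'lib/armeabi-v7a/libfoo.so',
+#                      src_path='libfoo.so', compress=False, alignment=0x1000)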
diff --git a/src/build/android/gyp/validate_static_library_dex_references.py b/src/build/android/gyp/validate_static_library_dex_references.py
new file mode 100755
index 0000000..b14ca3c
--- /dev/null
+++ b/src/build/android/gyp/validate_static_library_dex_references.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.dex import dex_parser
+from util import build_utils
+
+_FLAGS_PATH = (
+    '//chrome/android/java/static_library_dex_reference_workarounds.flags')
+
+
+def _FindIllegalStaticLibraryReferences(static_lib_dex_files,
+                                        main_apk_dex_files):
+  main_apk_defined_types = set()
+  for dex_file in main_apk_dex_files:
+    for class_def_item in dex_file.class_def_item_list:
+      main_apk_defined_types.add(
+          dex_file.GetTypeString(class_def_item.class_idx))
+
+  static_lib_referenced_types = set()
+  for dex_file in static_lib_dex_files:
+    for type_item in dex_file.type_item_list:
+      static_lib_referenced_types.add(
+          dex_file.GetString(type_item.descriptor_idx))
+
+  return main_apk_defined_types.intersection(static_lib_referenced_types)
+
+
+def _DexFilesFromPath(path):
+  if zipfile.is_zipfile(path):
+    with zipfile.ZipFile(path) as z:
+      return [
+          dex_parser.DexFile(bytearray(z.read(name))) for name in z.namelist()
+          if re.match(r'.*classes[0-9]*\.dex$', name)
+      ]
+  else:
+    with open(path, 'rb') as f:
+      return [dex_parser.DexFile(bytearray(f.read()))]
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--depfile', required=True, help='Path to output depfile.')
+  parser.add_argument(
+      '--stamp', required=True, help='Path to file to touch upon success.')
+  parser.add_argument(
+      '--static-library-dex',
+      required=True,
+      help='classes.dex or classes.zip for the static library APK that was '
+      'proguarded with other dependent APKs')
+  parser.add_argument(
+      '--static-library-dependent-dex',
+      required=True,
+      action='append',
+      dest='static_library_dependent_dexes',
+      help='classes.dex or classes.zip for the APKs that use the static '
+      'library APK')
+  args = parser.parse_args(args)
+
+  static_library_dexfiles = _DexFilesFromPath(args.static_library_dex)
+  for path in args.static_library_dependent_dexes:
+    dependent_dexfiles = _DexFilesFromPath(path)
+    illegal_references = _FindIllegalStaticLibraryReferences(
+        static_library_dexfiles, dependent_dexfiles)
+
+    if illegal_references:
+      msg = 'Found illegal references from {} to {}\n'.format(
+          args.static_library_dex, path)
+      msg += 'Add a -keep rule to avoid this. '
+      msg += 'See {} for an example and why this is necessary.\n'.format(
+          _FLAGS_PATH)
+      msg += 'The illegal references are:\n'
+      msg += '\n'.join(illegal_references)
+      sys.stderr.write(msg)
+      sys.exit(1)
+
+  input_paths = [args.static_library_dex] + args.static_library_dependent_dexes
+  build_utils.Touch(args.stamp)
+  build_utils.WriteDepfile(args.depfile, args.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/validate_static_library_dex_references.pydeps b/src/build/android/gyp/validate_static_library_dex_references.pydeps
new file mode 100644
index 0000000..e57172d
--- /dev/null
+++ b/src/build/android/gyp/validate_static_library_dex_references.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/validate_static_library_dex_references.pydeps build/android/gyp/validate_static_library_dex_references.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/dex/__init__.py
+../pylib/dex/dex_parser.py
+util/__init__.py
+util/build_utils.py
+validate_static_library_dex_references.py
diff --git a/src/build/android/gyp/write_build_config.py b/src/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000..0600fdc
--- /dev/null
+++ b/src/build/android/gyp/write_build_config.py
@@ -0,0 +1,2087 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+  --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+  1. inputs/deps of the action ensure that the files are available the first
+  time the action runs.
+  2. Either (a) or (b)
+    a. inputs/deps ensure that the action runs whenever one of the files changes
+    b. the files are added to the action's depfile
+
+NOTE: All paths within .build_config files are relative to $CHROMIUM_OUTPUT_DIR.
+
+This is a technical note describing the format of .build_config files.
+Please keep it updated when changing this script. For extraction and
+visualization instructions, see build/android/docs/build_config.md
+
+------------- BEGIN_MARKDOWN ---------------------------------------------------
+The .build_config file format
+===
+
+# Introduction
+
+This document tries to explain the format of the `.build_config` files
+generated during the Android build of Chromium. For a higher-level explanation
+of these files, please read
+[build/android/docs/build_config.md](build/android/docs/build_config.md).
+
+# The `deps_info` top-level dictionary:
+
+All `.build_config` files have a required `'deps_info'` key, whose value is a
+dictionary describing the target and its dependencies. This dictionary has the
+following required keys:
+
+## Required keys in `deps_info`:
+
+* `deps_info['type']`: The target type as a string.
+
+    The following types are known to both the internal GN build rules and the
+    build scripts:
+
+    * [java_binary](#target_java_binary)
+    * [java_annotation_processor](#target_java_annotation_processor)
+    * [junit_binary](#target_junit_binary)
+    * [java_library](#target_java_library)
+    * [android_assets](#target_android_assets)
+    * [android_resources](#target_android_resources)
+    * [android_apk](#target_android_apk)
+    * [android_app_bundle_module](#target_android_app_bundle_module)
+    * [android_app_bundle](#target_android_app_bundle)
+    * [dist_jar](#target_dist_jar)
+    * [dist_aar](#target_dist_aar)
+    * [group](#target_group)
+
+    See later sections for more details of some of these.
+
+* `deps_info['path']`: Path to the target's `.build_config` file.
+
+* `deps_info['name']`: Nothing more than the basename of `deps_info['path']`
+at the moment.
+
+* `deps_info['deps_configs']`: List of paths to the `.build_config` files of
+all *direct* dependencies of the current target.
+
+    NOTE: Because the `.build_config` of a given target is always generated
+    after the `.build_config` of its dependencies, the `write_build_config.py`
+    script can use chains of `deps_configs` to compute transitive dependencies
+    for each target when needed.
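+
+    For illustration only, here is a minimal, entirely hypothetical
+    `.build_config` file (all field values are made up, not taken from a real
+    build):
+
+        {
+          "deps_info": {
+            "type": "java_library",
+            "path": "gen/foo/foo.build_config",
+            "name": "foo.build_config",
+            "deps_configs": [ "gen/bar/bar.build_config" ]
+          }
+        }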
+
+## Optional keys in `deps_info`:
+
+The following keys will only appear in the `.build_config` files of certain
+target types:
+
+* `deps_info['requires_android']`: True to indicate that the corresponding
+code uses Android-specific APIs, and thus cannot run on the host within a
+regular JVM. May only appear in Java-related targets.
+
+* `deps_info['supports_android']`:
+May appear in Java-related targets, and indicates that
+the corresponding code doesn't use Java APIs that are not available on
+Android. As such it may run either on the host or on an Android device.
+
+* `deps_info['assets']`:
+Only seen for the [`android_assets`](#target_android_assets) type. See below.
+
+* `deps_info['package_name']`: Java package name associated with this target.
+
+    NOTE: For `android_resources` targets,
+    this is the package name for the corresponding R class. For `android_apk`
+    targets, this is the corresponding package name. This does *not* appear for
+    other target types.
+
+* `deps_info['android_manifest']`:
+Path to an AndroidManifest.xml file related to the current target.
+
+* `deps_info['base_module_config']`:
+Only seen for the [`android_app_bundle`](#target_android_app_bundle) type.
+Path to the base module for the bundle.
+
+* `deps_info['is_base_module']`:
+Only seen for the
+[`android_app_bundle_module`](#target_android_app_bundle_module)
+type. Whether or not this module is the base module for some bundle.
+
+* `deps_info['dependency_zips']`:
+List of `deps_info['resources_zip']` entries for all `android_resources`
+dependencies for the current target.
+
+* `deps_info['extra_package_names']`:
+Always empty for `android_resources` types. Otherwise,
+the list of `deps_info['package_name']` entries for all `android_resources`
+dependencies for the current target. Computed automatically by
+`write_build_config.py`.
+
+* `deps_info['dependency_r_txt_files']`:
+Exists only on `dist_aar` targets. It is the list of `deps_info['r_text_path']`
+entries from transitive dependencies. Computed automatically.
+
+
+# `.build_config` target types description:
+
+## <a name="target_group">Target type `group`</a>:
+
+This type corresponds to a simple target that is only used to group
+dependencies. It matches the `java_group()` GN template. Its only top-level
+`deps_info` keys are `supports_android` (always True), and `deps_configs`.
+
+
+## <a name="target_android_resources">Target type `android_resources`</a>:
+
+This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will
+end up packaged into the final APK by the build system.
+
+It uses the following keys:
+
+
+* `deps_info['res_sources_path']`:
+Path to a file containing a list of resource source files used by the
+android_resources target. This replaces `deps_info['resource_dirs']`, which
+is no longer used.
+
+* `deps_info['resources_zip']`:
+*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled
+resource files for this target (but no `R.txt`, `R.java` or `R.class` files).
+
+    If `deps_info['resource_dirs']` is missing, this must point to a prebuilt
+    `.aar` archive containing resources. Otherwise, this will point to a
+    zip archive generated at build time, wrapping the content of
+    `deps_info['resource_dirs']` into a single zip file.
+
+* `deps_info['package_name']`:
+Java package name that the R class for this target belongs to.
+
+* `deps_info['android_manifest']`:
+Optional. Path to the top-level Android manifest file associated with these
+resources (if not provided, an empty manifest will be used to generate R.txt).
+
+* `deps_info['resource_overlay']`:
+Optional. Whether the resources in resources_zip should override resources with
+the same name. Does not affect the behaviour of any android_resources()
+dependencies of this target. If a target with resource_overlay=true depends
+on another target with resource_overlay=true, the target with the dependency
+overrides the other.
+
+* `deps_info['r_text_path']`:
+Path to the `R.txt` file that describes the resources wrapped by
+this target. Normally this file is generated from the content of the resource
+directories or zip file, but some targets can provide their own `R.txt` file
+if they want.
+
+* `deps_info['srcjar_path']`:
+Path to the `.srcjar` file that contains the auto-generated `R.java` source
+file corresponding to the content of `deps_info['r_text_path']`. This is
+*always* generated from the content of `deps_info['r_text_path']` by the
+`build/android/gyp/process_resources.py` script.
+
+* `deps_info['static_library_dependent_classpath_configs']`:
+Sub-dictionary mapping .build_config paths to lists of jar files. For static
+library APKs, this defines which input jars belong to each
+static_library_dependent_target.
+
+* `deps_info['static_library_proguard_mapping_output_paths']`:
+Additional paths to copy the ProGuard mapping file to for static library
+APKs.
+
+## <a name="target_android_assets">Target type `android_assets`</a>:
+
+This type corresponds to targets used to group Android assets, i.e. arbitrary
+files that will be placed under `//assets/` within the final APK.
+
+These use a `deps_info['assets']` key to hold a dictionary of values related
+to assets covered by this target.
+
+* `assets['sources']`:
+The list of all asset source paths for this target. Each source path can
+use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location
+of the assets (relative to `//assets/`) within the APK.
+
+* `assets['outputs']`:
+Optional. Some of the sources might be renamed before being stored in the
+final //assets/ sub-directory. When this happens, this contains a list of
+all renamed output file paths.
+
+    NOTE: When not empty, the first items of `assets['sources']` must match
+    every item in this list. Extra sources correspond to non-renamed sources.
+
+    NOTE: This comes from the `asset_renaming_destinations` parameter for the
+    `android_assets()` GN template.
+
+* `assets['disable_compression']`:
+Optional. Will be True to indicate that these assets should be stored
+uncompressed in the final APK. For example, this is necessary for locale
+.pak files used by the System WebView feature.
+
+* `assets['treat_as_locale_paks']`:
+Optional. Will be True to indicate that these assets are locale `.pak` files
+(containing localized strings for C++). These are later processed to generate
+a special `.build_config.java` source file, listing all supported Locales in
+the current build.
+
+
+## <a name="target_java_library">Target type `java_library`</a>:
+
+This type is used to describe targets that wrap Java bytecode, either created
+by compiling sources or provided as a prebuilt jar.
+
+* `deps_info['public_deps_configs']`: List of paths to the `.build_config` files
+of *direct* dependencies of the current target which are exposed as part of the
+current target's public API. This should be a subset of
+deps_info['deps_configs'].
+
+* `deps_info['ignore_dependency_public_deps']`: If true, 'public_deps' will not
+be collected from the current target's direct deps.
+
+* `deps_info['unprocessed_jar_path']`:
+Path to the original .jar file for this target, before any kind of processing
+through Proguard or other tools. For most targets this is generated
+from sources, with a name like `$target_name.javac.jar`. However, when using
+a prebuilt jar, this will point to the source archive directly.
+
+* `deps_info['device_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (ready to be dexed).
+
+* `deps_info['host_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (used by java_binary).
+
+* `deps_info['interface_jar_path']`:
+Path to the interface jar generated for this library. This corresponds to
+a jar file that only contains declarations. Generated by running the `ijar`
+tool on `deps_info['unprocessed_jar_path']` or the `turbine` tool on source
+files.
+
+* `deps_info['dex_path']`:
+Path to the `.dex` file generated for this target, from
+`deps_info['device_jar_path']` unless this comes from a prebuilt `.aar` archive.
+
+* `deps_info['is_prebuilt']`:
+True to indicate that this target corresponds to a prebuilt `.jar` file.
+In this case, `deps_info['unprocessed_jar_path']` will point to the source
+`.jar` file. Otherwise, it will point to a build-generated file.
+
+* `deps_info['java_sources_file']`:
+Path to a single `.sources` file listing all the Java sources that were used
+to generate the library (simple text format, one `.java` path per line).
+
+* `deps_info['lint_android_manifest']`:
+Path to an AndroidManifest.xml file to use for this lint target.
+
+* `deps_info['lint_java_sources']`:
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies that are chromium code. Note: this is a list of files, where each
+file contains a list of Java source files. This is used for lint.
+
+* `deps_info['lint_aars']`:
+List of all aars from transitive java dependencies. This allows lint to collect
+their custom annotations.zip and run checks like @IntDef on their annotations.
+
+* `deps_info['lint_srcjars']`:
+List of all bundled srcjars of all transitive java library targets. Excludes
+non-chromium java libraries.
+
+* `deps_info['lint_resource_sources']`:
+List of all resource sources files belonging to all transitive resource
+dependencies of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['lint_resource_zips']`:
+List of all resource zip files belonging to all transitive resource dependencies
+of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['javac']`:
+A dictionary containing information about the way the sources in this library
+are compiled. Appears also on other Java-related targets. See the [dedicated
+section about this](#dict_javac) below for details.
+
+* `deps_info['javac_full_classpath']`:
+The classpath used when performing bytecode processing. Essentially the
+collection of all `deps_info['unprocessed_jar_path']` entries for the target
+and all its dependencies.
+
+* `deps_info['javac_full_interface_classpath']`:
+The classpath used when using the errorprone compiler.
+
+* `deps_info['proguard_enabled']`:
+True to indicate that ProGuard processing is enabled for this target.
+
+* `deps_info['proguard_configs']`:
+A list of paths to ProGuard configuration files related to this library.
+
+* `deps_info['extra_classpath_jars']`:
+For some Java related types, a list of extra `.jar` files to use at build time
+but not at runtime.
+
+## <a name="target_java_binary">Target type `java_binary`</a>:
+
+This type corresponds to a Java binary, which is nothing more than a
+`java_library` target that also provides a main class name. It thus inherits
+all entries from the `java_library` type, and adds:
+
+* `deps_info['main_class']`:
+Name of the main Java class that serves as an entry point for the binary.
+
+* `deps_info['device_classpath']`:
+The classpath used when running a Java or Android binary. Essentially the
+collection of all `deps_info['device_jar_path']` entries for the target and all
+its dependencies.
+
+
+## <a name="target_junit_binary">Target type `junit_binary`</a>:
+
+A target type for JUnit-specific binaries. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for the name.
+
+
+## <a name="target_java_annotation_processor">Target type \
+`java_annotation_processor`</a>:
+
+A target type for Java annotation processors. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for the name, and that it requires a `deps_info['main_class']` entry.
+
+
+## <a name="target_android_apk">Target type `android_apk`</a>:
+
+Corresponds to an Android APK. Inherits from the
+[`java_binary`](#target_java_binary) type and adds:
+
+* `deps_info['apk_path']`:
+Path to the raw, unsigned, APK generated by this target.
+
+* `deps_info['incremental_apk_path']`:
+Path to the raw, unsigned, incremental APK generated by this target.
+
+* `deps_info['incremental_install_json_path']`:
+Path to the JSON file with per-apk details for incremental install.
+See `build/android/gyp/incremental/write_installer_json.py` for more
+details about its content.
+
+* `deps_info['dist_jar']['all_interface_jars']`:
+For `android_apk` and `dist_jar` targets, a list of all interface jar files
+that will be merged into the final `.jar` file for distribution.
+
+* `deps_info['final_dex']['path']`:
+Path to the final classes.dex file (or classes.zip in case of multi-dex)
+for this APK.
+
+* `deps_info['final_dex']['all_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all libraries
+that comprise this APK. Valid only for debug builds.
+
+* `native['libraries']`
+List of native libraries for the primary ABI to be embedded in this APK.
+E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
+prefix).
+
+* `native['java_libraries_list']`
+The same list as `native['libraries']` as a string holding a Java source
+fragment, e.g. `"{\"chrome\"}"`, without any `lib` prefix, and `.so`
+suffix (as expected by `System.loadLibrary()`).
+
+* `native['second_abi_libraries']`
+List of native libraries for the secondary ABI to be embedded in this APK.
+Empty if only a single ABI is supported.
+
+* `native['uncompress_shared_libraries']`
+A boolean indicating whether native libraries are stored uncompressed in the
+APK.
+
+* `native['loadable_modules']`
+A list of native libraries to store within the APK, in addition to those from
+`native['libraries']`. These correspond to things like the Chromium linker
+or instrumentation libraries.
+
+* `native['secondary_abi_loadable_modules']`
+Secondary ABI version of `loadable_modules`.
+
+* `native['library_always_compress']`
+A list of library files that we always compress.
+
+* `native['library_renames']`
+A list of library files whose names we prefix with "crazy.".
+
+* `assets`
+A list of assets stored compressed in the APK. Each entry has the format
+`<source-path>:<destination-path>`, where `<source-path>` is relative to
+`$CHROMIUM_OUTPUT_DIR`, and `<destination-path>` is relative to `//assets/`
+within the APK.
+
+NOTE: Not to be confused with the `deps_info['assets']` dictionary that
+belongs to `android_assets` targets only.
+
+* `uncompressed_assets`
+A list of uncompressed assets stored in the APK. Each entry has the format
+`<source-path>:<destination-path>` too.
+
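+For illustration only (hypothetical paths), the two asset lists might look
+like:
+
+```python
+# Each entry is '<source-path>:<destination-path>'.
+assets = [
+    'gen/example/data.bin:data.bin',
+]
+uncompressed_assets = [
+    'locales/en-US.pak:locales/en-US.pak',
+]
+```
+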
+* `locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+uncompressed as Android assets.
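+E.g. (hypothetical): `"{\"en-US\",\"fr\"}"`.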
+
+* `extra_android_manifests`
+A list of `deps_configs['android_manifest']` entries, for all resource
+dependencies for this target. I.e. a list of paths to manifest files for
+all the resources in this APK. These will be merged with the root manifest
+file to generate the final one used to build the APK.
+
+* `java_resources_jars`
+This is a list of `.jar` files whose *Java* resources should be included in
+the final APK. For example, this is used to copy the `.res` files from the
+EMMA Coverage tool. The copy will omit any `.class` file and the top-level
+`//meta-inf/` directory from the input jars. Everything else will be copied
+into the final APK as-is.
+
+NOTE: This has nothing to do with *Android* resources.
+
+* `jni['all_source']`
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies for this APK. Note: this is a list of files, where each file
+contains a list of Java source files. This is used for JNI registration.
+
+* `deps_info['proguard_all_configs']`:
+The collection of all `deps_info['proguard_configs']` values from this target
+and all its dependencies.
+
+* `deps_info['proguard_classpath_jars']`:
+The collection of all `deps_info['extra_classpath_jars']` values from all
+dependencies.
+
+* `deps_info['proguard_under_test_mapping']`:
+Applicable to APKs with ProGuard enabled that have an `apk_under_test`. This
+is the path to the `apk_under_test`'s output ProGuard `.mapping` file.
+
+## <a name="target_android_app_bundle_module">Target type \
+`android_app_bundle_module`</a>:
+
+Corresponds to an Android app bundle module. Very similar to an APK, and it
+inherits the same fields, except that it does not generate an installable
+file (see `android_app_bundle`) and omits the following fields:
+
+* `deps_info['apk_path']`, `deps_info['incremental_apk_path']` and
+  `deps_info['incremental_install_json_path']`.
+
+* The top-level `dist_jar` entry.
+
+Compared to `android_apk` targets, it adds the following new fields:
+
+* `deps_info['proto_resources_path']`:
+The path of a zip archive containing the APK's resources compiled to the
+protocol buffer format (instead of regular binary xml + resources.arsc).
+
+* `deps_info['module_rtxt_path']`:
+The path of the R.txt file generated when compiling the resources for the bundle
+module.
+
+* `deps_info['module_pathmap_path']`:
+The path of the pathmap file generated when compiling the resources for the
+bundle module, if resource path shortening is enabled.
+
+* `deps_info['base_allowlist_rtxt_path']`:
+Optional path to an R.txt file used as an allowlist for base string resources.
+This means that any string resource listed in this file *and* in
+`deps_info['module_rtxt_path']` will end up in the base split APK of any
+`android_app_bundle` target that uses this target as its base module.
+
+This ensures that such localized strings are available to all bundle installs,
+even when language based splits are enabled (e.g. required for WebView strings
+inside the Monochrome bundle).
+
+
+## <a name="target_android_app_bundle">Target type `android_app_bundle`</a>
+
+This target type corresponds to an Android app bundle (`.aab` file), and is
+built from one or more `android_app_bundle_module` targets listed as
+dependencies.
+
+
+## <a name="target_dist_aar">Target type `dist_aar`</a>:
+
+This type corresponds to a target used to generate an `.aar` archive for
+distribution. The archive's content is determined by the target's dependencies.
+
+The following entries are always present, except where marked optional:
+
+  * `deps_info['supports_android']` (always True).
+  * `deps_info['requires_android']` (always True).
+  * `deps_info['proguard_configs']` (optional).
+
+
+## <a name="target_dist_jar">Target type `dist_jar`</a>:
+
+This type is similar to [`dist_aar`](#target_dist_aar) but is not
+Android-specific, and used to create a `.jar` file that can be later
+redistributed.
+
+The following entries are always present, except where marked optional:
+
+  * `deps_info['proguard_enabled']` (False by default).
+  * `deps_info['proguard_configs']` (optional).
+  * `deps_info['supports_android']` (True by default).
+  * `deps_info['requires_android']` (False by default).
+
+
+
+## <a name="dict_javac">The `deps_info['javac']` dictionary</a>:
+
+This dictionary appears in Java-related targets (e.g. `java_library`,
+`android_apk` and others), and contains information related to the compilation
+of Java sources, class files, and jars.
+
+* `javac['resource_packages']`
+For `java_library` targets, this is the list of package names for all resource
+dependencies for the current target. Order must match the one from
+`javac['srcjars']`. For other target types, this key does not exist.
+
+* `javac['classpath']`
+The classpath used to compile this target when annotation processors are
+present.
+
+* `javac['interface_classpath']`
+The classpath used to compile this target when annotation processors are
+not present. These jars are also always used to know when a target needs to
+be rebuilt.
+
+* `javac['processor_classpath']`
+The classpath listing the jars used for annotation processors. I.e. sent as
+`-processorpath` when invoking `javac`.
+
+* `javac['processor_classes']`
+The list of annotation processor main classes. I.e. sent as `-processor` when
+invoking `javac`.
+
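+A sketch of this dictionary for a `java_library` target, with hypothetical
+paths:
+
+```python
+# Hypothetical 'javac' dictionary.
+javac = {
+    'resource_packages': ['org.example.res'],
+    'classpath': ['lib.java/dep1.jar'],
+    'interface_classpath': ['lib.java/dep1.interface.jar'],
+    'processor_classpath': ['lib.java/annotation_processor.jar'],
+    'processor_classes': ['org.example.ExampleProcessor'],
+}
+```
+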
+## <a name="android_app_bundle">Target type `android_app_bundle`</a>:
+
+This type corresponds to an Android app bundle (`.aab` file).
+
+--------------- END_MARKDOWN ---------------------------------------------------
+"""
+
+from __future__ import print_function
+
+import collections
+import itertools
+import json
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+from util import resource_utils
+
+# TODO(crbug.com/1174969): Remove this once Python2 is obsoleted.
+if sys.version_info.major == 2:
+  zip_longest = itertools.izip_longest
+else:
+  zip_longest = itertools.zip_longest
+
+
+# Types that should never be used as a dependency of another build config.
+_ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
+               'junit_binary', 'android_app_bundle')
+# Types that should not allow code deps to pass through.
+_RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library')
+
+
+class OrderedSet(collections.OrderedDict):
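+  # An insertion-ordered set built on OrderedDict, e.g.
+  # OrderedSet.fromkeys(['b', 'a', 'b']) keeps its keys in order ['b', 'a'].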
+  # The |value| parameter is present only to match the signature of the
+  # overridden dict.fromkeys() and avoid a presubmit warning.
+  @staticmethod
+  def fromkeys(iterable, value=None):
+    out = OrderedSet()
+    out.update(iterable)
+    return out
+
+  def add(self, key):
+    self[key] = True
+
+  def update(self, iterable):
+    for v in iterable:
+      self.add(v)
+
+
+def _ExtractMarkdownDocumentation(input_text):
+  """Extract Markdown documentation from a list of input strings lines.
+
+     This generates a list of strings extracted from |input_text|, by looking
+     for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
+  in_markdown = False
+  result = []
+  for line in input_text.splitlines():
+    if in_markdown:
+      if '-- END_MARKDOWN --' in line:
+        in_markdown = False
+      else:
+        result.append(line)
+    else:
+      if '-- BEGIN_MARKDOWN --' in line:
+        in_markdown = True
+
+  return result
+
+class AndroidManifest(object):
+  def __init__(self, path):
+    self.path = path
+    dom = xml.dom.minidom.parse(path)
+    manifests = dom.getElementsByTagName('manifest')
+    assert len(manifests) == 1
+    self.manifest = manifests[0]
+
+  def GetInstrumentationElements(self):
+    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+    if len(instrumentation_els) == 0:
+      return None
+    return instrumentation_els
+
+  def CheckInstrumentationElements(self, expected_package):
+    instrs = self.GetInstrumentationElements()
+    if not instrs:
+      raise Exception('No <instrumentation> elements found in %s' % self.path)
+    for instr in instrs:
+      instrumented_package = instr.getAttributeNS(
+          'http://schemas.android.com/apk/res/android', 'targetPackage')
+      if instrumented_package != expected_package:
+        raise Exception(
+            'Wrong instrumented package. Expected %s, got %s'
+            % (expected_package, instrumented_package))
+
+  def GetPackageName(self):
+    return self.manifest.getAttribute('package')
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+  if path not in dep_config_cache:
+    with open(path) as jsonfile:
+      dep_config_cache[path] = json.load(jsonfile)['deps_info']
+  return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths, filter_func=None):
+  def GetDeps(path):
+    config = GetDepConfig(path)
+    if filter_func and not filter_func(config):
+      return []
+    return config['deps_configs']
+
+  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+def GetObjectByPath(obj, key_path):
+  """Given an object, return its nth child based on a key path.
+  """
+  return GetObjectByPath(obj[key_path[0]], key_path[1:]) if key_path else obj
+
+
+def RemoveObjDups(obj, base, *key_path):
+  """Remove array items from an object[*kep_path] that are also
+     contained in the base[*kep_path] (duplicates).
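+
+     E.g. RemoveObjDups(obj, base, 'deps_info', 'deps_configs') removes from
+     obj['deps_info']['deps_configs'] every entry that also appears in
+     base['deps_info']['deps_configs'].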
+  """
+  base_target = set(GetObjectByPath(base, key_path))
+  target = GetObjectByPath(obj, key_path)
+  target[:] = [x for x in target if x not in base_target]
+
+
+class Deps(object):
+  def __init__(self, direct_deps_config_paths):
+    self._all_deps_config_paths = GetAllDepsConfigsInOrder(
+        direct_deps_config_paths)
+    self._direct_deps_configs = [
+        GetDepConfig(p) for p in direct_deps_config_paths
+    ]
+    self._all_deps_configs = [
+        GetDepConfig(p) for p in self._all_deps_config_paths
+    ]
+    self._direct_deps_config_paths = direct_deps_config_paths
+
+  def All(self, wanted_type=None):
+    if wanted_type is None:
+      return self._all_deps_configs
+    return DepsOfType(wanted_type, self._all_deps_configs)
+
+  def Direct(self, wanted_type=None):
+    if wanted_type is None:
+      return self._direct_deps_configs
+    return DepsOfType(wanted_type, self._direct_deps_configs)
+
+  def DirectAndChildPublicDeps(self, wanted_type=None):
+    """Returns direct dependencies and dependencies exported via public_deps of
+       direct dependencies.
+    """
+    dep_paths = set(self._direct_deps_config_paths)
+    for direct_dep in self._direct_deps_configs:
+      dep_paths.update(direct_dep.get('public_deps_configs', []))
+    deps_list = [GetDepConfig(p) for p in dep_paths]
+    if wanted_type is None:
+      return deps_list
+    return DepsOfType(wanted_type, deps_list)
+
+  def AllConfigPaths(self):
+    return self._all_deps_config_paths
+
+  def GradlePrebuiltJarPaths(self):
+    ret = []
+
+    def helper(cur):
+      for config in cur.Direct('java_library'):
+        if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']:
+          if config['unprocessed_jar_path'] not in ret:
+            ret.append(config['unprocessed_jar_path'])
+
+    helper(self)
+    return ret
+
+  def GradleLibraryProjectDeps(self):
+    ret = []
+
+    def helper(cur):
+      for config in cur.Direct('java_library'):
+        if config['is_prebuilt']:
+          pass
+        elif config['gradle_treat_as_prebuilt']:
+          helper(Deps(config['deps_configs']))
+        elif config not in ret:
+          ret.append(config)
+
+    helper(self)
+    return ret
+
+
+def _MergeAssets(all_assets):
+  """Merges all assets from the given deps.
+
+  Returns:
+    A tuple of: (compressed, uncompressed, locale_paks)
+    |compressed| and |uncompressed| are lists of "srcPath:zipPath". srcPath is
+    the path of the asset to add, and zipPath is the location within the zip
+    (excluding assets/ prefix).
+    |locale_paks| is a set of all zipPaths that have been marked as
+    treat_as_locale_paks=true.
+  """
+  compressed = {}
+  uncompressed = {}
+  locale_paks = set()
+  for asset_dep in all_assets:
+    entry = asset_dep['assets']
+    disable_compression = entry.get('disable_compression')
+    treat_as_locale_paks = entry.get('treat_as_locale_paks')
+    dest_map = uncompressed if disable_compression else compressed
+    other_map = compressed if disable_compression else uncompressed
+    outputs = entry.get('outputs', [])
+    for src, dest in zip_longest(entry['sources'], outputs):
+      if not dest:
+        dest = os.path.basename(src)
+      # Merge so that each path shows up in only one of the lists, and that
+      # deps of the same target override previous ones.
+      other_map.pop(dest, 0)
+      dest_map[dest] = src
+      if treat_as_locale_paks:
+        locale_paks.add(dest)
+
+  def create_list(asset_map):
+    ret = ['%s:%s' % (src, dest) for dest, src in asset_map.items()]
+    # Sort to ensure deterministic ordering.
+    ret.sort()
+    return ret
+
+  return create_list(compressed), create_list(uncompressed), locale_paks
+
+
+def _ResolveGroups(configs):
+  """Returns a list of configs with all groups inlined."""
+  ret = list(configs)
+  while True:
+    groups = DepsOfType('group', ret)
+    if not groups:
+      return ret
+    for config in groups:
+      index = ret.index(config)
+      expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
+      ret[index:index + 1] = expanded_configs
+
+
+def _DepsFromPaths(dep_paths,
+                   target_type,
+                   filter_root_targets=True,
+                   recursive_resource_deps=False):
+  """Resolves all groups and trims dependency branches that we never want.
+
+  E.g. When a resource or asset depends on an apk target, the intent is to
+  include the .apk as a resource/asset, not to have the apk's classpath added.
+
+  This method is meant to be called to get the top nodes (i.e. closest to
+  current target) that we could then use to get a full transitive dependants
+  list (eg using Deps#all). So filtering single elements out of this list,
+  filters whole branches of dependencies. By resolving groups (i.e. expanding
+  them to their constituents), depending on a group is equivalent to directly
+  depending on each element of that group.
+  """
+  blocklist = []
+  allowlist = []
+
+  # Don't allow root targets to be considered as a dep.
+  if filter_root_targets:
+    blocklist.extend(_ROOT_TYPES)
+
+  # Don't allow java libraries to cross through assets/resources.
+  if target_type in _RESOURCE_TYPES:
+    allowlist.extend(_RESOURCE_TYPES)
+    # Pretend that this target directly depends on all of its transitive
+    # dependencies.
+    if recursive_resource_deps:
+      dep_paths = GetAllDepsConfigsInOrder(dep_paths)
+      # Exclude assets if recursive_resource_deps is set. The
+      # recursive_resource_deps arg is used to pull resources into the base
+      # module to workaround bugs accessing resources in isolated DFMs, but
+      # assets should be kept in the DFMs.
+      blocklist.append('android_assets')
+
+  return _DepsFromPathsWithFilters(dep_paths, blocklist, allowlist)
+
+
+def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None):
+  """Resolves all groups and trims dependency branches that we never want.
+
+  See _DepsFromPaths.
+
+  |blocklist|, if passed, lists the types of direct dependencies we do not
+  care about (i.e. tips of branches that we wish to prune).
+
+  |allowlist|, if passed, lists the only types of direct dependencies we care
+  about (i.e. we wish to prune all other branches that do not start from one
+  of these).
+  """
+  configs = [GetDepConfig(p) for p in dep_paths]
+  groups = DepsOfType('group', configs)
+  configs = _ResolveGroups(configs)
+  configs += groups
+  if blocklist:
+    configs = [c for c in configs if c['type'] not in blocklist]
+  if allowlist:
+    configs = [c for c in configs if c['type'] in allowlist]
+
+  return Deps([c['path'] for c in configs])
+
+
+def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
+  ret = []
+  with open(runtime_deps_file) as f:
+    for line in f:
+      line = line.rstrip()
+      if not line.endswith('.so'):
+        continue
+      # Only unstripped .so files are listed in runtime deps.
+      # Convert to the stripped .so by going up one directory.
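+      # E.g. 'lib.unstripped/libfoo.so' becomes 'libfoo.so'.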
+      ret.append(os.path.normpath(line.replace('lib.unstripped/', '')))
+  ret.reverse()
+  return ret
+
+
+def _CreateJavaLibrariesList(library_paths):
+  """Returns a java literal array with the "base" library names:
+  e.g. libfoo.so -> foo
+  """
+  names = ['"%s"' % os.path.basename(s)[3:-3] for s in library_paths]
+  return ('{%s}' % ','.join(sorted(set(names))))
+
+
+def _CreateJavaLocaleListFromAssets(assets, locale_paks):
+  """Returns a java literal array from a list of locale assets.
+
+  Args:
+    assets: A list of all APK asset paths in the form 'src:dst'
+    locale_paks: A list of asset paths that correspond to the locale pak
+      files of interest. Each |assets| entry will have its 'dst' part matched
+      against it to determine if they are part of the result.
+  Returns:
+    A string that is a Java source literal array listing the locale names
+    of the corresponding asset files, without directory or .pak suffix.
+    E.g. '{"en-GB", "en-US", "es-ES", "fr", ... }'
+  """
+  assets_paths = [a.split(':')[1] for a in assets]
+  locales = [os.path.basename(a)[:-4] for a in assets_paths if a in locale_paks]
+  return '{%s}' % ','.join('"%s"' % l for l in sorted(locales))
+
+
+def _AddJarMapping(jar_to_target, configs):
+  for config in configs:
+    jar = config.get('unprocessed_jar_path')
+    if jar:
+      jar_to_target[jar] = config['gn_target']
+    for jar in config.get('extra_classpath_jars', []):
+      jar_to_target[jar] = config['gn_target']
+
+
+def _CompareClasspathPriority(dep):
+  return 1 if dep.get('low_classpath_priority') else 0
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--build-config', help='Path to build_config output.')
+  parser.add_option(
+      '--type',
+      help='Type of this target (e.g. android_library).')
+  parser.add_option('--gn-target', help='GN label for this target')
+  parser.add_option(
+      '--deps-configs',
+      help='GN-list of dependent build_config files.')
+  parser.add_option(
+      '--annotation-processor-configs',
+      help='GN-list of build_config files for annotation processors.')
+
+  # android_resources options
+  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+  parser.add_option('--package-name',
+      help='Java package name for these resources.')
+  parser.add_option('--android-manifest', help='Path to android manifest.')
+  parser.add_option('--resource-dirs', action='append', default=[],
+                    help='GN-list of resource dirs')
+  parser.add_option(
+      '--res-sources-path',
+      help='Path to file containing a list of paths to resources.')
+  parser.add_option(
+      '--resource-overlay',
+      action='store_true',
+      help='Whether resources passed in via --resources-zip should override '
+      'resources with the same name')
+  parser.add_option(
+      '--recursive-resource-deps',
+      action='store_true',
+      help='Whether deps should be walked recursively to find resource deps.')
+
+  # android_assets options
+  parser.add_option('--asset-sources', help='List of asset sources.')
+  parser.add_option('--asset-renaming-sources',
+                    help='List of asset sources with custom destinations.')
+  parser.add_option('--asset-renaming-destinations',
+                    help='List of asset custom destinations.')
+  parser.add_option('--disable-asset-compression', action='store_true',
+                    help='Whether to disable asset compression.')
+  parser.add_option('--treat-as-locale-paks', action='store_true',
+      help='Consider the assets as locale paks in BuildConfig.java')
+
+  # java library options
+
+  parser.add_option('--public-deps-configs',
+                    help='GN list of config files of deps which are exposed as '
+                    'part of the target\'s public API.')
+  parser.add_option(
+      '--ignore-dependency-public-deps',
+      action='store_true',
+      help='If true, \'public_deps\' will not be collected from the current '
+      'target\'s direct deps.')
+  parser.add_option('--aar-path', help='Path to containing .aar file.')
+  parser.add_option('--device-jar-path', help='Path to .jar for dexing.')
+  parser.add_option('--host-jar-path', help='Path to .jar for java_binary.')
+  parser.add_option('--unprocessed-jar-path',
+      help='Path to the .jar to use for javac classpath purposes.')
+  parser.add_option(
+      '--interface-jar-path',
+      help='Path to the interface .jar to use for javac classpath purposes.')
+  parser.add_option('--is-prebuilt', action='store_true',
+                    help='Whether the jar was compiled or pre-compiled.')
+  parser.add_option('--java-sources-file', help='Path to .sources file')
+  parser.add_option('--bundled-srcjars',
+      help='GN-list of .srcjars that have been included in this java_library.')
+  parser.add_option('--supports-android', action='store_true',
+      help='Whether this library supports running on the Android platform.')
+  parser.add_option('--requires-android', action='store_true',
+      help='Whether this library requires running on the Android platform.')
+  parser.add_option('--bypass-platform-checks', action='store_true',
+      help='Bypass checks for support/require Android platform.')
+  parser.add_option('--extra-classpath-jars',
+      help='GN-list of .jar files to include on the classpath when compiling, '
+           'but not to include in the final binary.')
+  parser.add_option(
+      '--low-classpath-priority',
+      action='store_true',
+      help='Indicates that the library should be placed at the end of the '
+      'classpath.')
+  parser.add_option(
+      '--mergeable-android-manifests',
+      help='GN-list of AndroidManifest.xml to include in manifest merging.')
+  parser.add_option('--gradle-treat-as-prebuilt', action='store_true',
+      help='Whether this library should be treated as a prebuilt library by '
+           'generate_gradle.py.')
+  parser.add_option('--main-class',
+      help='Main class for java_binary or java_annotation_processor targets.')
+  parser.add_option('--java-resources-jar-path',
+                    help='Path to JAR that contains java resources. Everything '
+                    'from this JAR except meta-inf/ content and .class files '
+                    'will be added to the final APK.')
+  parser.add_option(
+      '--non-chromium-code',
+      action='store_true',
+      help='True if a java library is not chromium code, used for lint.')
+
+  # android library options
+  parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+  # native library options
+  parser.add_option('--shared-libraries-runtime-deps',
+                    help='Path to file containing runtime deps for shared '
+                         'libraries.')
+  parser.add_option(
+      '--loadable-modules',
+      action='append',
+      help='GN-list of native libraries for primary '
+      'android-abi. Can be specified multiple times.',
+      default=[])
+  parser.add_option('--secondary-abi-shared-libraries-runtime-deps',
+                    help='Path to file containing runtime deps for secondary '
+                         'abi shared libraries.')
+  parser.add_option(
+      '--secondary-abi-loadable-modules',
+      action='append',
+      help='GN-list of native libraries for secondary '
+      'android-abi. Can be specified multiple times.',
+      default=[])
+  parser.add_option(
+      '--native-lib-placeholders',
+      action='append',
+      help='GN-list of native library placeholders to add.',
+      default=[])
+  parser.add_option(
+      '--secondary-native-lib-placeholders',
+      action='append',
+      help='GN-list of native library placeholders to add '
+      'for the secondary android-abi.',
+      default=[])
+  parser.add_option('--uncompress-shared-libraries', default=False,
+                    action='store_true',
+                    help='Whether to store native libraries uncompressed')
+  parser.add_option(
+      '--library-always-compress',
+      help='The list of library files that we always compress.')
+  parser.add_option(
+      '--library-renames',
+      default=[],
+      help='The list of library files whose names are prefixed with "crazy.".')
+
+  # apk options
+  parser.add_option('--apk-path', help='Path to the target\'s apk output.')
+  parser.add_option('--incremental-apk-path',
+                    help="Path to the target's incremental apk output.")
+  parser.add_option('--incremental-install-json-path',
+                    help="Path to the target's generated incremental install "
+                    "json.")
+  parser.add_option(
+      '--tested-apk-config',
+      help='Path to the build config of the tested apk (for an instrumentation '
+      'test apk).')
+  parser.add_option(
+      '--proguard-enabled',
+      action='store_true',
+      help='Whether proguard is enabled for this apk or bundle module.')
+  parser.add_option(
+      '--proguard-configs',
+      help='GN-list of proguard flag files to use in final apk.')
+  parser.add_option(
+      '--proguard-mapping-path',
+      help='Path to the .mapping file created by the ProGuard step.')
+
+  # apk options that are static library specific
+  parser.add_option(
+      '--static-library-dependent-configs',
+      help='GN list of .build_configs of targets that use this target as a '
+      'static library.')
+
+  # options shared between android_resources and apk targets
+  parser.add_option('--r-text-path', help='Path to target\'s R.txt file.')
+
+  parser.add_option('--fail',
+      help='GN-list of error message lines to fail with.')
+
+  parser.add_option('--final-dex-path',
+                    help='Path to final input classes.dex (or classes.zip) to '
+                    'use in final apk.')
+  parser.add_option('--res-size-info', help='Path to .ap_.info')
+  parser.add_option('--apk-proto-resources',
+                    help='Path to resources compiled in protocol buffer format '
+                         'for this apk.')
+  parser.add_option(
+      '--module-pathmap-path',
+      help='Path to pathmap file for resource paths in a bundle module.')
+  parser.add_option(
+      '--base-allowlist-rtxt-path',
+      help='Path to R.txt file for the base resources allowlist.')
+  parser.add_option(
+      '--is-base-module',
+      action='store_true',
+      help='Specifies that this module is a base module for some app bundle.')
+
+  parser.add_option('--generate-markdown-format-doc', action='store_true',
+                    help='Dump the Markdown .build_config format documentation '
+                    'then exit immediately.')
+
+  parser.add_option(
+      '--base-module-build-config',
+      help='Path to the base module\'s build config '
+      'if this is a feature module.')
+
+  parser.add_option(
+      '--module-build-configs',
+      help='For bundles, the paths of all non-async module .build_configs '
+      'for modules that are part of the bundle.')
+
+  parser.add_option('--version-name', help='Version name for this APK.')
+  parser.add_option('--version-code', help='Version code for this APK.')
+
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  if options.generate_markdown_format_doc:
+    doc_lines = _ExtractMarkdownDocumentation(__doc__)
+    for line in doc_lines:
+      print(line)
+    return 0
+
+  if options.fail:
+    parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
+
+  lib_options = ['unprocessed_jar_path', 'interface_jar_path']
+  device_lib_options = ['device_jar_path', 'dex_path']
+  required_options_map = {
+      'android_apk': ['build_config'] + lib_options + device_lib_options,
+      'android_app_bundle_module':
+      ['build_config', 'final_dex_path', 'res_size_info'] + lib_options +
+      device_lib_options,
+      'android_assets': ['build_config'],
+      'android_resources': ['build_config', 'resources_zip'],
+      'dist_aar': ['build_config'],
+      'dist_jar': ['build_config'],
+      'group': ['build_config'],
+      'java_annotation_processor': ['build_config', 'main_class'],
+      'java_binary': ['build_config'],
+      'java_library': ['build_config', 'host_jar_path'] + lib_options,
+      'junit_binary': ['build_config'],
+      'system_java_library': ['build_config', 'unprocessed_jar_path'],
+      'android_app_bundle': ['build_config', 'module_build_configs'],
+  }
+  required_options = required_options_map.get(options.type)
+  if not required_options:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  if options.type != 'android_app_bundle_module':
+    if options.apk_proto_resources:
+      raise Exception('--apk-proto-resources can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.module_pathmap_path:
+      raise Exception('--module-pathmap-path can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.base_allowlist_rtxt_path:
+      raise Exception('--base-allowlist-rtxt-path can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.is_base_module:
+      raise Exception('--is-base-module can only be used with '
+                      '--type=android_app_bundle_module')
+
+  is_apk_or_module_target = options.type in ('android_apk',
+      'android_app_bundle_module')
+
+  if not is_apk_or_module_target:
+    if options.uncompress_shared_libraries:
+      raise Exception('--uncompress-shared-libraries can only be used '
+                      'with --type=android_apk or '
+                      '--type=android_app_bundle_module')
+    if options.library_always_compress:
+      raise Exception(
+          '--library-always-compress can only be used with --type=android_apk '
+          'or --type=android_app_bundle_module')
+    if options.library_renames:
+      raise Exception(
+          '--library-renames can only be used with --type=android_apk or '
+          '--type=android_app_bundle_module')
+
+  if options.device_jar_path and not options.dex_path:
+    raise Exception('java_library that supports Android requires a dex path.')
+  if any(getattr(options, x) for x in lib_options):
+    for attr in lib_options:
+      if not getattr(options, attr):
+        raise Exception('Expected %s to be set.' % attr)
+
+  if options.requires_android and not options.supports_android:
+    raise Exception(
+        '--supports-android is required when using --requires-android')
+
+  is_java_target = options.type in (
+      'java_binary', 'junit_binary', 'java_annotation_processor',
+      'java_library', 'android_apk', 'dist_aar', 'dist_jar',
+      'system_java_library', 'android_app_bundle_module')
+
+  is_static_library_dex_provider_target = (
+      options.static_library_dependent_configs and options.proguard_enabled)
+  if is_static_library_dex_provider_target:
+    if options.type != 'android_apk':
+      raise Exception(
+          '--static-library-dependent-configs only supports --type=android_apk')
+  options.static_library_dependent_configs = build_utils.ParseGnList(
+      options.static_library_dependent_configs)
+  static_library_dependent_configs_by_path = {
+      p: GetDepConfig(p)
+      for p in options.static_library_dependent_configs
+  }
+
+  deps_configs_paths = build_utils.ParseGnList(options.deps_configs)
+  deps = _DepsFromPaths(deps_configs_paths,
+                        options.type,
+                        recursive_resource_deps=options.recursive_resource_deps)
+  processor_deps = _DepsFromPaths(
+      build_utils.ParseGnList(options.annotation_processor_configs or ''),
+      options.type, filter_root_targets=False)
+
+  all_inputs = (deps.AllConfigPaths() + processor_deps.AllConfigPaths() +
+                list(static_library_dependent_configs_by_path))
+
+  if options.recursive_resource_deps:
+    # Include java_library targets since changes to these targets can remove
+    # resource deps from the build, which would require rebuilding this target's
+    # build config file: crbug.com/1168655.
+    recursive_java_deps = _DepsFromPathsWithFilters(
+        GetAllDepsConfigsInOrder(deps_configs_paths),
+        allowlist=['java_library'])
+    all_inputs.extend(recursive_java_deps.AllConfigPaths())
+
+  direct_deps = deps.Direct()
+  system_library_deps = deps.Direct('system_java_library')
+  all_deps = deps.All()
+  all_library_deps = deps.All('java_library')
+  all_resources_deps = deps.All('android_resources')
+
+  if options.type == 'java_library':
+    java_library_deps = _DepsFromPathsWithFilters(
+        deps_configs_paths, allowlist=['android_resources'])
+    # for java libraries, we only care about resources that are directly
+    # reachable without going through another java_library.
+    all_resources_deps = java_library_deps.All('android_resources')
+  if options.type == 'android_resources' and options.recursive_resource_deps:
+    # android_resources targets that want recursive resource deps also need to
+    # collect package_names from all library deps. This ensures the R.java files
+    # for these libraries will get pulled in along with the resources.
+    android_resources_library_deps = _DepsFromPathsWithFilters(
+        deps_configs_paths, allowlist=['java_library']).All('java_library')
+  if is_apk_or_module_target:
+    # android_resources deps which had recursive_resource_deps set should not
+    # have the manifests from the recursively collected deps added to this
+    # module. This keeps the manifest declarations in the child DFMs, since they
+    # will have the Java implementations.
+    def ExcludeRecursiveResourcesDeps(config):
+      return not config.get('includes_recursive_resources', False)
+
+    extra_manifest_deps = [
+        GetDepConfig(p) for p in GetAllDepsConfigsInOrder(
+            deps_configs_paths, filter_func=ExcludeRecursiveResourcesDeps)
+    ]
+
+  base_module_build_config = None
+  if options.base_module_build_config:
+    with open(options.base_module_build_config, 'r') as f:
+      base_module_build_config = json.load(f)
+
+  # Initialize some common config.
+  # Any value that needs to be queryable by dependents must go within deps_info.
+  config = {
+      'deps_info': {
+          'name': os.path.basename(options.build_config),
+          'path': options.build_config,
+          'type': options.type,
+          'gn_target': options.gn_target,
+          'deps_configs': [d['path'] for d in direct_deps],
+          'chromium_code': not options.non_chromium_code,
+      },
+      # Info needed only by generate_gradle.py.
+      'gradle': {}
+  }
+  deps_info = config['deps_info']
+  gradle = config['gradle']
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_config = tested_apk_deps.Direct()[0]
+    gradle['apk_under_test'] = tested_apk_config['name']
+
+  if options.type == 'android_app_bundle_module':
+    deps_info['is_base_module'] = bool(options.is_base_module)
+
+  # Required for generating gradle files.
+  if options.type == 'java_library':
+    deps_info['is_prebuilt'] = bool(options.is_prebuilt)
+    deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt
+
+  if options.android_manifest:
+    deps_info['android_manifest'] = options.android_manifest
+
+  if options.bundled_srcjars:
+    deps_info['bundled_srcjars'] = build_utils.ParseGnList(
+        options.bundled_srcjars)
+
+  if options.java_sources_file:
+    deps_info['java_sources_file'] = options.java_sources_file
+
+  if is_java_target:
+    if options.bundled_srcjars:
+      gradle['bundled_srcjars'] = deps_info['bundled_srcjars']
+
+    gradle['dependent_android_projects'] = []
+    gradle['dependent_java_projects'] = []
+    gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths()
+
+    if options.main_class:
+      deps_info['main_class'] = options.main_class
+
+    for c in deps.GradleLibraryProjectDeps():
+      if c['requires_android']:
+        gradle['dependent_android_projects'].append(c['path'])
+      else:
+        gradle['dependent_java_projects'].append(c['path'])
+
+  if options.r_text_path:
+    deps_info['r_text_path'] = options.r_text_path
+
+  # TODO(tiborg): Remove creation of JNI info for type group and java_library
+  # once we can generate the JNI registration based on APK / module targets as
+  # opposed to groups and libraries.
+  if is_apk_or_module_target or options.type in (
+      'group', 'java_library', 'junit_binary'):
+    deps_info['jni'] = {}
+    all_java_sources = [c['java_sources_file'] for c in all_library_deps
+                        if 'java_sources_file' in c]
+    if options.java_sources_file:
+      all_java_sources.append(options.java_sources_file)
+
+    if options.apk_proto_resources:
+      deps_info['proto_resources_path'] = options.apk_proto_resources
+
+    deps_info['version_name'] = options.version_name
+    deps_info['version_code'] = options.version_code
+    if options.module_pathmap_path:
+      deps_info['module_pathmap_path'] = options.module_pathmap_path
+    else:
+      # Ensure there is an entry, even if it is empty, for modules
+      # that have not enabled resource path shortening. Otherwise
+      # build_utils.ExpandFileArgs fails.
+      deps_info['module_pathmap_path'] = ''
+
+    if options.base_allowlist_rtxt_path:
+      deps_info['base_allowlist_rtxt_path'] = options.base_allowlist_rtxt_path
+    else:
+      # Ensure there is an entry, even if it is empty, for modules
+      # that don't need such a allowlist.
+      deps_info['base_allowlist_rtxt_path'] = ''
+
+  if is_java_target:
+    deps_info['requires_android'] = bool(options.requires_android)
+    deps_info['supports_android'] = bool(options.supports_android)
+
+    if not options.bypass_platform_checks:
+      deps_require_android = (all_resources_deps +
+          [d['name'] for d in all_library_deps if d['requires_android']])
+      deps_not_support_android = (
+          [d['name'] for d in all_library_deps if not d['supports_android']])
+
+      if deps_require_android and not options.requires_android:
+        raise Exception('Some deps require building for the Android platform: '
+            + str(deps_require_android))
+
+      if deps_not_support_android and options.supports_android:
+        raise Exception('Not all deps support the Android platform: '
+            + str(deps_not_support_android))
+
+  if is_apk_or_module_target or options.type == 'dist_jar':
+    all_dex_files = [c['dex_path'] for c in all_library_deps]
+
+  if is_java_target:
+    # Classpath values filled in below (after applying tested_apk_config).
+    config['javac'] = {}
+    if options.aar_path:
+      deps_info['aar_path'] = options.aar_path
+    if options.unprocessed_jar_path:
+      deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
+      deps_info['interface_jar_path'] = options.interface_jar_path
+    if options.public_deps_configs:
+      deps_info['public_deps_configs'] = build_utils.ParseGnList(
+          options.public_deps_configs)
+    if options.device_jar_path:
+      deps_info['device_jar_path'] = options.device_jar_path
+    if options.host_jar_path:
+      deps_info['host_jar_path'] = options.host_jar_path
+    if options.dex_path:
+      deps_info['dex_path'] = options.dex_path
+      if is_apk_or_module_target:
+        all_dex_files.append(options.dex_path)
+    if options.low_classpath_priority:
+      deps_info['low_classpath_priority'] = True
+    if options.type == 'android_apk':
+      deps_info['apk_path'] = options.apk_path
+      deps_info['incremental_apk_path'] = options.incremental_apk_path
+      deps_info['incremental_install_json_path'] = (
+          options.incremental_install_json_path)
+
+  if options.type == 'android_assets':
+    all_asset_sources = []
+    if options.asset_renaming_sources:
+      all_asset_sources.extend(
+          build_utils.ParseGnList(options.asset_renaming_sources))
+    if options.asset_sources:
+      all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources))
+
+    deps_info['assets'] = {
+        'sources': all_asset_sources
+    }
+    if options.asset_renaming_destinations:
+      deps_info['assets']['outputs'] = (
+          build_utils.ParseGnList(options.asset_renaming_destinations))
+    if options.disable_asset_compression:
+      deps_info['assets']['disable_compression'] = True
+    if options.treat_as_locale_paks:
+      deps_info['assets']['treat_as_locale_paks'] = True
+
+  if options.type == 'android_resources':
+    deps_info['resources_zip'] = options.resources_zip
+    if options.resource_overlay:
+      deps_info['resource_overlay'] = True
+    if options.srcjar:
+      deps_info['srcjar'] = options.srcjar
+    if options.android_manifest:
+      manifest = AndroidManifest(options.android_manifest)
+      deps_info['package_name'] = manifest.GetPackageName()
+    if options.package_name:
+      deps_info['package_name'] = options.package_name
+    deps_info['res_sources_path'] = ''
+    if options.res_sources_path:
+      deps_info['res_sources_path'] = options.res_sources_path
+
+  if options.requires_android and options.type == 'java_library':
+    # Used to strip out R.class for android_prebuilt()s.
+    config['javac']['resource_packages'] = [
+        c['package_name'] for c in all_resources_deps if 'package_name' in c
+    ]
+    if options.package_name:
+      deps_info['package_name'] = options.package_name
+
+  if options.type in ('android_resources', 'android_apk', 'junit_binary',
+                      'dist_aar', 'android_app_bundle_module', 'java_library'):
+    dependency_zips = []
+    dependency_zip_overlays = []
+    for c in all_resources_deps:
+      if not c['resources_zip']:
+        continue
+
+      dependency_zips.append(c['resources_zip'])
+      if c.get('resource_overlay'):
+        dependency_zip_overlays.append(c['resources_zip'])
+
+    extra_package_names = []
+
+    if options.type != 'android_resources':
+      extra_package_names = [
+          c['package_name'] for c in all_resources_deps if 'package_name' in c
+      ]
+
+      # android_resources targets which specified recursive_resource_deps may
+      # have extra_package_names.
+      for resources_dep in all_resources_deps:
+        extra_package_names.extend(resources_dep['extra_package_names'])
+
+      # Final types (i.e. apks and modules) that create real R.java files
+      # must collect package names from java_libraries as well.
+      # https://crbug.com/1073476
+      if options.type != 'java_library':
+        extra_package_names.extend([
+            c['package_name'] for c in all_library_deps if 'package_name' in c
+        ])
+    elif options.recursive_resource_deps:
+      # Pull extra_package_names from library deps if recursive resource deps
+      # are required.
+      extra_package_names = [
+          c['package_name'] for c in android_resources_library_deps
+          if 'package_name' in c
+      ]
+      config['deps_info']['includes_recursive_resources'] = True
+
+    if options.type in ('dist_aar', 'java_library'):
+      r_text_files = [
+          c['r_text_path'] for c in all_resources_deps if 'r_text_path' in c
+      ]
+      deps_info['dependency_r_txt_files'] = r_text_files
+
+    # For feature modules, remove any resources that already exist in the base
+    # module.
+    if base_module_build_config:
+      dependency_zips = [
+          c for c in dependency_zips
+          if c not in base_module_build_config['deps_info']['dependency_zips']
+      ]
+      dependency_zip_overlays = [
+          c for c in dependency_zip_overlays if c not in
+          base_module_build_config['deps_info']['dependency_zip_overlays']
+      ]
+      extra_package_names = [
+          c for c in extra_package_names if c not in
+          base_module_build_config['deps_info']['extra_package_names']
+      ]
+
+    if options.type == 'android_apk' and options.tested_apk_config:
+      config['deps_info']['arsc_package_name'] = (
+          tested_apk_config['package_name'])
+      # We should not shadow the actual R.java files of the apk_under_test by
+      # creating new R.java files with the same package names in the tested apk.
+      extra_package_names = [
+          package for package in extra_package_names
+          if package not in tested_apk_config['extra_package_names']
+      ]
+    if options.res_size_info:
+      config['deps_info']['res_size_info'] = options.res_size_info
+
+    config['deps_info']['dependency_zips'] = dependency_zips
+    config['deps_info']['dependency_zip_overlays'] = dependency_zip_overlays
+    config['deps_info']['extra_package_names'] = extra_package_names
+
+  # These are .jars to add to javac classpath but not to runtime classpath.
+  extra_classpath_jars = build_utils.ParseGnList(options.extra_classpath_jars)
+  if extra_classpath_jars:
+    deps_info['extra_classpath_jars'] = extra_classpath_jars
+
+  mergeable_android_manifests = build_utils.ParseGnList(
+      options.mergeable_android_manifests)
+  if mergeable_android_manifests:
+    deps_info['mergeable_android_manifests'] = mergeable_android_manifests
+
+  extra_proguard_classpath_jars = []
+  proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+  if proguard_configs:
+    # Make a copy of |proguard_configs| since it's mutated below.
+    deps_info['proguard_configs'] = list(proguard_configs)
+
+  if is_java_target:
+    if options.ignore_dependency_public_deps:
+      classpath_direct_deps = deps.Direct()
+      classpath_direct_library_deps = deps.Direct('java_library')
+    else:
+      classpath_direct_deps = deps.DirectAndChildPublicDeps()
+      classpath_direct_library_deps = deps.DirectAndChildPublicDeps(
+          'java_library')
+
+    # The classpath used to compile this target when annotation processors are
+    # present.
+    javac_classpath = set(c['unprocessed_jar_path']
+                          for c in classpath_direct_library_deps)
+    # The classpath used to compile this target when annotation processors are
+    # not present. These are also always used to know when a target needs to be
+    # rebuilt.
+    javac_interface_classpath = set(c['interface_jar_path']
+                                    for c in classpath_direct_library_deps)
+
+    # Preserve order of |all_library_deps|. Move low priority libraries to the
+    # end of the classpath.
+    all_library_deps_sorted_for_classpath = sorted(
+        all_library_deps[::-1], key=_CompareClasspathPriority)
+
+    # The classpath used for bytecode rewriting.
+    javac_full_classpath = OrderedSet.fromkeys(
+        c['unprocessed_jar_path']
+        for c in all_library_deps_sorted_for_classpath)
+    # The classpath used for error prone.
+    javac_full_interface_classpath = OrderedSet.fromkeys(
+        c['interface_jar_path'] for c in all_library_deps_sorted_for_classpath)
+
+    # Adding base module to classpath to compile against its R.java file
+    if base_module_build_config:
+      javac_full_classpath.add(
+          base_module_build_config['deps_info']['unprocessed_jar_path'])
+      javac_full_interface_classpath.add(
+          base_module_build_config['deps_info']['interface_jar_path'])
+      # Turbine now compiles headers against only the direct classpath, so the
+      # base module's interface jar must be on the direct interface classpath.
+      javac_interface_classpath.add(
+          base_module_build_config['deps_info']['interface_jar_path'])
+
+    for dep in classpath_direct_deps:
+      if 'extra_classpath_jars' in dep:
+        javac_classpath.update(dep['extra_classpath_jars'])
+        javac_interface_classpath.update(dep['extra_classpath_jars'])
+    for dep in all_deps:
+      if 'extra_classpath_jars' in dep:
+        javac_full_classpath.update(dep['extra_classpath_jars'])
+        javac_full_interface_classpath.update(dep['extra_classpath_jars'])
+
+    # TODO(agrieve): Might be less confusing to fold these into bootclasspath.
+    # Deps to add to the compile-time classpath (but not the runtime classpath).
+    # These are jars specified by input_jars_paths that almost never change.
+    # Just add them directly to all the classpaths.
+    if options.extra_classpath_jars:
+      javac_classpath.update(extra_classpath_jars)
+      javac_interface_classpath.update(extra_classpath_jars)
+      javac_full_classpath.update(extra_classpath_jars)
+      javac_full_interface_classpath.update(extra_classpath_jars)
+
+  if is_java_target or options.type == 'android_app_bundle':
+    # The classpath to use to run this target (or as an input to ProGuard).
+    device_classpath = []
+    if is_java_target and options.device_jar_path:
+      device_classpath.append(options.device_jar_path)
+    device_classpath.extend(
+        c.get('device_jar_path') for c in all_library_deps
+        if c.get('device_jar_path'))
+    if options.type == 'android_app_bundle':
+      for d in deps.Direct('android_app_bundle_module'):
+        device_classpath.extend(c for c in d.get('device_classpath', [])
+                                if c not in device_classpath)
+
+  if options.type in ('dist_jar', 'java_binary', 'junit_binary'):
+    # The classpath to use to run this target.
+    host_classpath = []
+    if options.host_jar_path:
+      host_classpath.append(options.host_jar_path)
+    host_classpath.extend(c['host_jar_path'] for c in all_library_deps)
+    deps_info['host_classpath'] = host_classpath
+
+  # We allow lint to be run on android_apk targets, so we collect lint
+  # artifacts for them.
+  # We allow lint to be run on android_app_bundle targets, so we need to
+  # collect lint artifacts for the android_app_bundle_module targets that the
+  # bundle includes. Different android_app_bundle targets may include different
+  # android_app_bundle_module targets, so the bundle needs to be able to
+  # de-duplicate these lint artifacts.
+  if options.type in ('android_app_bundle_module', 'android_apk'):
+    # Collect all sources and resources at the apk/bundle_module level.
+    lint_aars = set()
+    lint_srcjars = set()
+    lint_java_sources = set()
+    lint_resource_sources = set()
+    lint_resource_zips = set()
+
+    if options.java_sources_file:
+      lint_java_sources.add(options.java_sources_file)
+    if options.bundled_srcjars:
+      lint_srcjars.update(deps_info['bundled_srcjars'])
+    for c in all_library_deps:
+      if c['chromium_code'] and c['requires_android']:
+        if 'java_sources_file' in c:
+          lint_java_sources.add(c['java_sources_file'])
+        lint_srcjars.update(c['bundled_srcjars'])
+      if 'aar_path' in c:
+        lint_aars.add(c['aar_path'])
+
+    if options.res_sources_path:
+      lint_resource_sources.add(options.res_sources_path)
+    if options.resources_zip:
+      lint_resource_zips.add(options.resources_zip)
+    for c in all_resources_deps:
+      if c['chromium_code']:
+        # Prefer res_sources_path to resources_zip so that lint errors have
+        # real paths and to avoid needing to extract during lint.
+        if c['res_sources_path']:
+          lint_resource_sources.add(c['res_sources_path'])
+        else:
+          lint_resource_zips.add(c['resources_zip'])
+
+    deps_info['lint_aars'] = sorted(lint_aars)
+    deps_info['lint_srcjars'] = sorted(lint_srcjars)
+    deps_info['lint_java_sources'] = sorted(lint_java_sources)
+    deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+    deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
+    deps_info['lint_extra_android_manifests'] = []
+
+    if options.type == 'android_apk':
+      assert options.android_manifest, 'Android APKs must define a manifest'
+      deps_info['lint_android_manifest'] = options.android_manifest
+
+  if options.type == 'android_app_bundle':
+    module_configs = [
+        GetDepConfig(c)
+        for c in build_utils.ParseGnList(options.module_build_configs)
+    ]
+    jni_all_source = set()
+    lint_aars = set()
+    lint_srcjars = set()
+    lint_java_sources = set()
+    lint_resource_sources = set()
+    lint_resource_zips = set()
+    lint_extra_android_manifests = set()
+    for c in module_configs:
+      if c['is_base_module']:
+        assert 'base_module_config' not in deps_info, (
+            'Must have exactly 1 base module!')
+        deps_info['base_module_config'] = c['path']
+        # Use the base module's android manifest for linting.
+        deps_info['lint_android_manifest'] = c['android_manifest']
+      else:
+        lint_extra_android_manifests.add(c['android_manifest'])
+      jni_all_source.update(c['jni']['all_source'])
+      lint_aars.update(c['lint_aars'])
+      lint_srcjars.update(c['lint_srcjars'])
+      lint_java_sources.update(c['lint_java_sources'])
+      lint_resource_sources.update(c['lint_resource_sources'])
+      lint_resource_zips.update(c['lint_resource_zips'])
+    deps_info['jni'] = {'all_source': sorted(jni_all_source)}
+    deps_info['lint_aars'] = sorted(lint_aars)
+    deps_info['lint_srcjars'] = sorted(lint_srcjars)
+    deps_info['lint_java_sources'] = sorted(lint_java_sources)
+    deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+    deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
+    deps_info['lint_extra_android_manifests'] = sorted(
+        lint_extra_android_manifests)
+
+  # Map configs to classpath entries that should be included in their final dex.
+  classpath_entries_by_owning_config = collections.defaultdict(list)
+  extra_main_r_text_files = []
+  if is_static_library_dex_provider_target:
+    # Map classpath entries to configs that include them in their classpath.
+    configs_by_classpath_entry = collections.defaultdict(list)
+    static_lib_jar_paths = {}
+    for config_path, dep_config in (sorted(
+        static_library_dependent_configs_by_path.items())):
+      # For bundles, only the jar path and jni sources of the base module
+      # are relevant for proguard. Should be updated when bundle feature
+      # modules support JNI.
+      base_config = dep_config
+      if dep_config['type'] == 'android_app_bundle':
+        base_config = GetDepConfig(dep_config['base_module_config'])
+      extra_main_r_text_files.append(base_config['r_text_path'])
+      static_lib_jar_paths[config_path] = base_config['device_jar_path']
+      proguard_configs.extend(dep_config['proguard_all_configs'])
+      extra_proguard_classpath_jars.extend(
+          dep_config['proguard_classpath_jars'])
+      all_java_sources.extend(base_config['jni']['all_source'])
+
+      # The srcjars containing the generated R.java files are excluded for APK
+      # targets that use static libraries, so we add them here to ensure the
+      # union of resource IDs is available in the static library APK.
+      for package in base_config['extra_package_names']:
+        if package not in extra_package_names:
+          extra_package_names.append(package)
+      for cp_entry in dep_config['device_classpath']:
+        configs_by_classpath_entry[cp_entry].append(config_path)
+
+    for cp_entry in device_classpath:
+      configs_by_classpath_entry[cp_entry].append(options.build_config)
+
+    for cp_entry, candidate_configs in configs_by_classpath_entry.items():
+      config_path = (candidate_configs[0]
+                     if len(candidate_configs) == 1 else options.build_config)
+      classpath_entries_by_owning_config[config_path].append(cp_entry)
+      device_classpath.append(cp_entry)
+
+    device_classpath = sorted(set(device_classpath))
+
+  deps_info['static_library_proguard_mapping_output_paths'] = sorted([
+      d['proguard_mapping_path']
+      for d in static_library_dependent_configs_by_path.values()
+  ])
+  deps_info['static_library_dependent_classpath_configs'] = {
+      path: sorted(set(classpath))
+      for path, classpath in classpath_entries_by_owning_config.items()
+  }
+  deps_info['extra_main_r_text_files'] = sorted(extra_main_r_text_files)
+
+  if is_apk_or_module_target or options.type in ('group', 'java_library',
+                                                 'junit_binary'):
+    deps_info['jni']['all_source'] = sorted(set(all_java_sources))
+
+  system_jars = [c['unprocessed_jar_path'] for c in system_library_deps]
+  system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
+  if system_library_deps:
+    config['android'] = {}
+    config['android']['sdk_interface_jars'] = system_interface_jars
+    config['android']['sdk_jars'] = system_jars
+
+  if options.type in ('android_apk', 'dist_aar',
+      'dist_jar', 'android_app_bundle_module', 'android_app_bundle'):
+    for c in all_deps:
+      proguard_configs.extend(c.get('proguard_configs', []))
+      extra_proguard_classpath_jars.extend(c.get('extra_classpath_jars', []))
+    if options.type == 'android_app_bundle':
+      for c in deps.Direct('android_app_bundle_module'):
+        proguard_configs.extend(c.get('proguard_configs', []))
+        extra_proguard_classpath_jars.extend(
+            p for p in c.get('proguard_classpath_jars', [])
+            if p not in extra_proguard_classpath_jars)
+
+    if options.type == 'android_app_bundle':
+      deps_proguard_enabled = []
+      deps_proguard_disabled = []
+      for d in deps.Direct('android_app_bundle_module'):
+        if not d['device_classpath']:
+          # We don't care about modules that have no Java code for proguarding.
+          continue
+        if d['proguard_enabled']:
+          deps_proguard_enabled.append(d['name'])
+        else:
+          deps_proguard_disabled.append(d['name'])
+      if deps_proguard_enabled and deps_proguard_disabled:
+        raise Exception('Deps %s have proguard enabled while deps %s have '
+                        'proguard disabled' % (deps_proguard_enabled,
+                                               deps_proguard_disabled))
+    deps_info['proguard_enabled'] = bool(options.proguard_enabled)
+
+    if options.proguard_mapping_path:
+      deps_info['proguard_mapping_path'] = options.proguard_mapping_path
+
+  # The java code for an instrumentation test apk is assembled differently for
+  # ProGuard vs. non-ProGuard.
+  #
+  # Without ProGuard: Each library's jar is dexed separately and then combined
+  # into a single classes.dex. A test apk will include all dex files not already
+  # present in the apk-under-test. At runtime all test code lives in the test
+  # apk, and the program code lives in the apk-under-test.
+  #
+  # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs
+  # a single .jar, which is then dexed into a classes.dex. A test apk includes
+  # all jar files from the program and the tests because having them separate
+  # doesn't work with ProGuard's whole-program optimizations. Although the
+  # apk-under-test still has all of its code in its classes.dex, none of it is
+  # used at runtime because the copy of it within the test apk takes precedence.
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    if tested_apk_config['proguard_enabled']:
+      assert options.proguard_enabled, ('proguard must be enabled for '
+          'instrumentation apks if it\'s enabled for the tested apk.')
+      # Mutating lists, so no need to explicitly re-assign to dict.
+      proguard_configs.extend(tested_apk_config['proguard_all_configs'])
+      extra_proguard_classpath_jars.extend(
+          tested_apk_config['proguard_classpath_jars'])
+      deps_info['proguard_under_test_mapping'] = (
+          tested_apk_config['proguard_mapping_path'])
+    elif options.proguard_enabled:
+      # Not sure why you'd want to proguard the test apk when the under-test apk
+      # is not proguarded, but it's easy enough to support.
+      deps_info['proguard_under_test_mapping'] = ''
+
+    # Add all tested classes to the test's classpath to ensure that the test's
+    # java code is a superset of the tested apk's java code.
+    device_classpath_extended = list(device_classpath)
+    device_classpath_extended.extend(
+        p for p in tested_apk_config['device_classpath']
+        if p not in device_classpath)
+    # Include in the classpath classes that are added directly to the apk under
+    # test (those that are not a part of a java_library).
+    javac_classpath.add(tested_apk_config['unprocessed_jar_path'])
+    javac_interface_classpath.add(tested_apk_config['interface_jar_path'])
+    javac_full_classpath.add(tested_apk_config['unprocessed_jar_path'])
+    javac_full_interface_classpath.add(tested_apk_config['interface_jar_path'])
+    javac_full_classpath.update(tested_apk_config['javac_full_classpath'])
+    javac_full_interface_classpath.update(
+        tested_apk_config['javac_full_interface_classpath'])
+
+    # Exclude .jar files from the test apk that exist within the apk under test.
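+    # E.g. a (hypothetical) base_java.jar that both apks depend on is dexed
+    # only into the apk-under-test; the test apk uses that copy at runtime.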
+    tested_apk_library_deps = tested_apk_deps.All('java_library')
+    tested_apk_dex_files = {c['dex_path'] for c in tested_apk_library_deps}
+    all_dex_files = [p for p in all_dex_files if p not in tested_apk_dex_files]
+    tested_apk_jar_files = set(tested_apk_config['device_classpath'])
+    device_classpath = [
+        p for p in device_classpath if p not in tested_apk_jar_files
+    ]
+
+  if options.type in ('android_apk', 'dist_aar', 'dist_jar',
+                      'android_app_bundle_module', 'android_app_bundle'):
+    deps_info['proguard_all_configs'] = sorted(set(proguard_configs))
+    deps_info['proguard_classpath_jars'] = sorted(
+        set(extra_proguard_classpath_jars))
+
+  # Dependencies for the final dex file of an apk.
+  if (is_apk_or_module_target or options.final_dex_path
+      or options.type == 'dist_jar'):
+    config['final_dex'] = {}
+    dex_config = config['final_dex']
+    dex_config['path'] = options.final_dex_path
+  if is_apk_or_module_target or options.type == 'dist_jar':
+    dex_config['all_dex_files'] = all_dex_files
+
+  if is_java_target:
+    config['javac']['classpath'] = sorted(javac_classpath)
+    config['javac']['interface_classpath'] = sorted(javac_interface_classpath)
+    # Entries from Direct() are of type 'java_annotation_processor', and so
+    # are not included in All('java_library').
+    # Annotation processors run on the host as part of the build, so they
+    # need host_jar_path.
+    config['javac']['processor_classpath'] = [
+        c['host_jar_path'] for c in processor_deps.Direct()
+        if c.get('host_jar_path')
+    ]
+    config['javac']['processor_classpath'] += [
+        c['host_jar_path'] for c in processor_deps.All('java_library')
+    ]
+    config['javac']['processor_classes'] = [
+        c['main_class'] for c in processor_deps.Direct()]
+    deps_info['javac_full_classpath'] = list(javac_full_classpath)
+    deps_info['javac_full_interface_classpath'] = list(
+        javac_full_interface_classpath)
+  elif options.type == 'android_app_bundle':
+    # Bundles require javac_full_classpath to create .aab.jar.info and
+    # javac_full_interface_classpath for lint.
+    javac_full_classpath = OrderedSet()
+    javac_full_interface_classpath = OrderedSet()
+    for d in deps.Direct('android_app_bundle_module'):
+      javac_full_classpath.update(d['javac_full_classpath'])
+      javac_full_interface_classpath.update(d['javac_full_interface_classpath'])
+      javac_full_classpath.add(d['unprocessed_jar_path'])
+      javac_full_interface_classpath.add(d['interface_jar_path'])
+    deps_info['javac_full_classpath'] = list(javac_full_classpath)
+    deps_info['javac_full_interface_classpath'] = list(
+        javac_full_interface_classpath)
+
+  if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module',
+                      'android_app_bundle'):
+    deps_info['device_classpath'] = device_classpath
+    if options.tested_apk_config:
+      deps_info['device_classpath_extended'] = device_classpath_extended
+
+  if options.type in ('android_apk', 'dist_jar'):
+    all_interface_jars = []
+    if options.interface_jar_path:
+      all_interface_jars.append(options.interface_jar_path)
+    all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps)
+
+    config['dist_jar'] = {
+      'all_interface_jars': all_interface_jars,
+    }
+
+  if is_apk_or_module_target:
+    manifest = AndroidManifest(options.android_manifest)
+    deps_info['package_name'] = manifest.GetPackageName()
+    if not options.tested_apk_config and manifest.GetInstrumentationElements():
+      # The apk must then declare instrumentation only for itself.
+      manifest.CheckInstrumentationElements(manifest.GetPackageName())
+
+    library_paths = []
+    java_libraries_list = None
+    if options.shared_libraries_runtime_deps:
+      library_paths = _ExtractSharedLibsFromRuntimeDeps(
+          options.shared_libraries_runtime_deps)
+      java_libraries_list = _CreateJavaLibrariesList(library_paths)
+      all_inputs.append(options.shared_libraries_runtime_deps)
+
+    secondary_abi_library_paths = []
+    if options.secondary_abi_shared_libraries_runtime_deps:
+      secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps(
+          options.secondary_abi_shared_libraries_runtime_deps)
+      all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps)
+
+    native_library_placeholder_paths = build_utils.ParseGnList(
+        options.native_lib_placeholders)
+
+    secondary_native_library_placeholder_paths = build_utils.ParseGnList(
+        options.secondary_native_lib_placeholders)
+
+    loadable_modules = build_utils.ParseGnList(options.loadable_modules)
+    secondary_abi_loadable_modules = build_utils.ParseGnList(
+        options.secondary_abi_loadable_modules)
+
+    config['native'] = {
+        'libraries':
+        library_paths,
+        'native_library_placeholders':
+        native_library_placeholder_paths,
+        'secondary_abi_libraries':
+        secondary_abi_library_paths,
+        'secondary_native_library_placeholders':
+        secondary_native_library_placeholder_paths,
+        'java_libraries_list':
+        java_libraries_list,
+        'uncompress_shared_libraries':
+        options.uncompress_shared_libraries,
+        'library_always_compress':
+        options.library_always_compress,
+        'library_renames':
+        options.library_renames,
+        'loadable_modules':
+        loadable_modules,
+        'secondary_abi_loadable_modules':
+        secondary_abi_loadable_modules,
+    }
+    config['assets'], config['uncompressed_assets'], locale_paks = (
+        _MergeAssets(deps.All('android_assets')))
+
+    deps_info['locales_java_list'] = _CreateJavaLocaleListFromAssets(
+        config['uncompressed_assets'], locale_paks)
+
+    config['extra_android_manifests'] = []
+    for c in extra_manifest_deps:
+      config['extra_android_manifests'].extend(
+          c.get('mergeable_android_manifests', []))
+
+    # Collect java resources
+    java_resources_jars = [d['java_resources_jar'] for d in all_library_deps
+                          if 'java_resources_jar' in d]
+    if options.tested_apk_config:
+      tested_apk_resource_jars = [d['java_resources_jar']
+                                  for d in tested_apk_library_deps
+                                  if 'java_resources_jar' in d]
+      java_resources_jars = [jar for jar in java_resources_jars
+                             if jar not in tested_apk_resource_jars]
+    config['java_resources_jars'] = java_resources_jars
+
+  if options.java_resources_jar_path:
+    deps_info['java_resources_jar'] = options.java_resources_jar_path
+
+  # DYNAMIC FEATURE MODULES:
+  # Make sure that dependencies that exist on the base module
+  # are not duplicated on the feature module.
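+  # E.g. a classpath entry or dex file present in both configs is removed
+  # from this (feature) module's config and kept only in the base module's.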
+  if base_module_build_config:
+    base = base_module_build_config
+    RemoveObjDups(config, base, 'deps_info', 'device_classpath')
+    RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
+    RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
+    RemoveObjDups(config, base, 'deps_info', 'jni', 'all_source')
+    RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
+    RemoveObjDups(config, base, 'extra_android_manifests')
+
+  if is_java_target:
+    jar_to_target = {}
+    _AddJarMapping(jar_to_target, [deps_info])
+    _AddJarMapping(jar_to_target, all_deps)
+    if base_module_build_config:
+      _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']])
+    if options.tested_apk_config:
+      _AddJarMapping(jar_to_target, [tested_apk_config])
+      for jar, target in zip(tested_apk_config['javac_full_classpath'],
+                             tested_apk_config['javac_full_classpath_targets']):
+        jar_to_target[jar] = target
+
+    # Used by bytecode_processor to give a better error message when missing
+    # deps are found.
+    config['deps_info']['javac_full_classpath_targets'] = [
+        jar_to_target[x] for x in deps_info['javac_full_classpath']
+    ]
+
+  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, options.build_config,
+                             sorted(set(all_inputs)))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/write_build_config.pydeps b/src/build/android/gyp/write_build_config.pydeps
new file mode 100644
index 0000000..b1276bc
--- /dev/null
+++ b/src/build/android/gyp/write_build_config.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
+write_build_config.py
diff --git a/src/build/android/gyp/write_native_libraries_java.py b/src/build/android/gyp/write_native_libraries_java.py
new file mode 100755
index 0000000..322b8b2
--- /dev/null
+++ b/src/build/android/gyp/write_native_libraries_java.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes list of native libraries to srcjar file."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+_NATIVE_LIBRARIES_TEMPLATE = """\
+// This file is autogenerated by
+//     build/android/gyp/write_native_libraries_java.py
+// Please do not change its content.
+
+package org.chromium.build;
+
+public class NativeLibraries {{
+    public static final int CPU_FAMILY_UNKNOWN = 0;
+    public static final int CPU_FAMILY_ARM = 1;
+    public static final int CPU_FAMILY_MIPS = 2;
+    public static final int CPU_FAMILY_X86 = 3;
+
+    // Set to true to enable the use of the Chromium Linker.
+    public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
+    public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
+    public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};
+
+    // This is the list of native libraries to be loaded (in the correct order)
+    // by LibraryLoader.java.
+    public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};
+
+    public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
+}}
+"""
+
+
+def _FormatLibraryName(library_name):
+  filename = os.path.split(library_name)[1]
+  assert filename.startswith('lib')
+  assert filename.endswith('.so')
+  # Remove lib prefix and .so suffix.
+  return '"%s"' % filename[3:-3]
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--final', action='store_true', help='Use final fields.')
+  parser.add_argument(
+      '--enable-chromium-linker',
+      action='store_true',
+      help='Enable Chromium linker.')
+  parser.add_argument(
+      '--load-library-from-apk',
+      action='store_true',
+      help='Load libraries from the APK without uncompressing.')
+  parser.add_argument(
+      '--use-modern-linker', action='store_true', help='Use ModernLinker.')
+  parser.add_argument(
+      '--native-libraries-list', help='File with list of native libraries.')
+  parser.add_argument(
+      '--cpu-family',
+      choices=[
+          'CPU_FAMILY_ARM', 'CPU_FAMILY_X86', 'CPU_FAMILY_MIPS',
+          'CPU_FAMILY_UNKNOWN'
+      ],
+      required=True,
+      help='CPU family.')
+  parser.add_argument(
+      '--main-component-library',
+      help='If used, the list of native libraries will only contain this '
+      'library. Dependencies are found in the library\'s "NEEDED" section.')
+
+  parser.add_argument(
+      '--output', required=True, help='Path to the generated srcjar file.')
+
+  options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+  assert (options.enable_chromium_linker or not options.load_library_from_apk)
+
+  native_libraries_list = []
+  if options.main_component_library:
+    native_libraries_list.append(
+        _FormatLibraryName(options.main_component_library))
+  elif options.native_libraries_list:
+    with open(options.native_libraries_list) as f:
+      for path in f:
+        path = path.strip()
+        native_libraries_list.append(_FormatLibraryName(path))
+
+  def bool_str(value):
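+    # Emit an explicit initializer when the flag is set. Final fields must be
+    # initialized, so emit ' = false' for unset flags when --final is used;
+    # non-final fields are left with Java's default value of false.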
+    if value:
+      return ' = true'
+    elif options.final:
+      return ' = false'
+    return ''
+
+  format_dict = {
+      'MAYBE_FINAL': 'final ' if options.final else '',
+      'USE_LINKER': bool_str(options.enable_chromium_linker),
+      'USE_LIBRARY_IN_ZIP_FILE': bool_str(options.load_library_from_apk),
+      'USE_MODERN_LINKER': bool_str(options.use_modern_linker),
+      'LIBRARIES': ','.join(native_libraries_list),
+      'CPU_FAMILY': options.cpu_family,
+  }
+  with build_utils.AtomicOutput(options.output) as f:
+    with zipfile.ZipFile(f.name, 'w') as srcjar_file:
+      build_utils.AddToZipHermetic(
+          zip_file=srcjar_file,
+          zip_path='org/chromium/build/NativeLibraries.java',
+          data=_NATIVE_LIBRARIES_TEMPLATE.format(**format_dict))
+
+  if options.depfile:
+    assert options.native_libraries_list
+    build_utils.WriteDepfile(options.depfile,
+                             options.output,
+                             inputs=[options.native_libraries_list])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/write_native_libraries_java.pydeps b/src/build/android/gyp/write_native_libraries_java.pydeps
new file mode 100644
index 0000000..f5176ef
--- /dev/null
+++ b/src/build/android/gyp/write_native_libraries_java.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_native_libraries_java.pydeps build/android/gyp/write_native_libraries_java.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+write_native_libraries_java.py
diff --git a/src/build/android/gyp/zip.py b/src/build/android/gyp/zip.py
new file mode 100755
index 0000000..6b40540
--- /dev/null
+++ b/src/build/android/gyp/zip.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Archives a set of files."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--input-files', help='GN-list of files to zip.')
+  parser.add_argument(
+      '--input-files-base-dir',
+      help='Paths in the archive will be relative to this directory.')
+  parser.add_argument('--input-zips', help='GN-list of zips to merge.')
+  parser.add_argument(
+      '--input-zips-excluded-globs',
+      help='GN-list of globs for paths to exclude.')
+  parser.add_argument('--output', required=True, help='Path to output archive.')
+  compress_group = parser.add_mutually_exclusive_group()
+  compress_group.add_argument(
+      '--compress', action='store_true', help='Compress entries')
+  compress_group.add_argument(
+      '--no-compress',
+      action='store_false',
+      dest='compress',
+      help='Do not compress entries')
+  build_utils.AddDepfileOption(parser)
+  options = parser.parse_args(args)
+
+  with build_utils.AtomicOutput(options.output) as f:
+    with zipfile.ZipFile(f.name, 'w') as out_zip:
+      depfile_deps = None
+      if options.input_files:
+        files = build_utils.ParseGnList(options.input_files)
+        build_utils.DoZip(
+            files,
+            out_zip,
+            base_dir=options.input_files_base_dir,
+            compress_fn=lambda _: options.compress)
+
+      if options.input_zips:
+        files = build_utils.ParseGnList(options.input_zips)
+        depfile_deps = files
+        path_transform = None
+        if options.input_zips_excluded_globs:
+          globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
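+          # MergeZips drops entries for which path_transform returns None.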
+          path_transform = (
+              lambda p: None if build_utils.MatchesGlob(p, globs) else p)
+        build_utils.MergeZips(
+            out_zip,
+            files,
+            path_transform=path_transform,
+            compress=options.compress)
+
+  # Depfile used only by dist_jar().
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile,
+                             options.output,
+                             inputs=depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/zip.pydeps b/src/build/android/gyp/zip.pydeps
new file mode 100644
index 0000000..36affd1
--- /dev/null
+++ b/src/build/android/gyp/zip.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+zip.py
diff --git a/src/build/android/host_heartbeat.py b/src/build/android/host_heartbeat.py
new file mode 100755
index 0000000..4e11c5c
--- /dev/null
+++ b/src/build/android/host_heartbeat.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heart beat pulse to the currently online Android devices.
+This heart beat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+import devil_chromium
+from devil.android import device_utils
+
+PULSE_PERIOD = 20
+
+def main():
+  devil_chromium.Initialize()
+
+  while True:
+    try:
+      devices = device_utils.DeviceUtils.HealthyDevices(denylist=None)
+      for d in devices:
+        d.RunShellCommand(['touch', '/sdcard/host_heartbeat'],
+                          check_return=True)
+    except:
+      # Keep the heartbeat running, ignoring all errors.
+      pass
+    time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/incremental_install/BUILD.gn b/src/build/android/incremental_install/BUILD.gn
new file mode 100644
index 0000000..8d26e96
--- /dev/null
+++ b/src/build/android/incremental_install/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+android_library("bootstrap_java") {
+  sources = [
+    "java/org/chromium/incrementalinstall/BootstrapApplication.java",
+    "java/org/chromium/incrementalinstall/BootstrapInstrumentation.java",
+    "java/org/chromium/incrementalinstall/ClassLoaderPatcher.java",
+    "java/org/chromium/incrementalinstall/LockFile.java",
+    "java/org/chromium/incrementalinstall/Reflect.java",
+    "java/org/chromium/incrementalinstall/SecondInstrumentation.java",
+  ]
+  jacoco_never_instrument = true
+  no_build_hooks = true
+}
+
+dist_dex("apk_dex") {
+  output = "$target_out_dir/apk.dex"
+  deps = [ ":bootstrap_java" ]
+}
diff --git a/src/build/android/incremental_install/README.md b/src/build/android/incremental_install/README.md
new file mode 100644
index 0000000..9a27b8c
--- /dev/null
+++ b/src/build/android/incremental_install/README.md
@@ -0,0 +1,83 @@
+# Incremental Install
+
+Incremental Install is a way of building & deploying an APK that tries to
+minimize the time it takes to make a change and see that change running on
+device. It works best with `is_component_build=true`, and does *not* require a
+rooted device.
+
+## Building
+
+Add the gn arg:
+
+    incremental_install = true
+
+This causes all apks to be built as incremental except for denylisted ones.
+
+## Running
+
+It is not enough to `adb install` them. You must use the generated wrapper
+script:
+
+    out/Debug/bin/your_apk run
+    out/Debug/bin/run_chrome_public_test_apk  # Automatically sets --fast-local-dev
+
+# How it Works
+
+## Overview
+
+The basic idea is to sideload .dex and .so files to `/data/local/tmp` rather
+than bundling them in the .apk. Then, when making a change, only the changed
+.dex / .so needs to be pushed to the device.
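+
+As a rough sketch (the exact layout is an implementation detail of
+`installer.py`), the side-loaded files end up in:
+
+    /data/local/tmp/incremental-app-<package>/
+        install.lock   # Held while the installer pushes files.
+        firstrun.lock  # Released once the primary process finishes loading.
+        lib/           # Side-loaded .so files.
+        dex/           # Side-loaded .dex shards.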
+
+Faster Builds:
+
+ * No `final_dex` step (where all .dex files are merged into one)
+ * No need to rebuild .apk for code-only changes (but required for resources)
+ * Apks sign faster because they are smaller.
+
+Faster Installs:
+
+ * The .apk is smaller, and so faster to verify.
+ * No need to run `adb install` for code-only changes.
+ * Only changed .so / .dex files are pushed. MD5s of existing on-device files
+   are cached on host computer.
+
+Slower Initial Runs:
+
+ * The first time you run an incremental .apk, the `DexOpt` needs to run on all
+   .dex files. This step is normally done during `adb install`, but is done on
+   start-up for incremental apks.
+   * DexOpt results are cached, so subsequent runs are faster.
+   * The slowdown varies significantly based on the Android version. Android O+
+     has almost no visible slow-down.
+
+Caveats:
+
+ * Isolated processes (on L+) are incompatible with incremental install. As a
+   work-around, isolated processes are disabled when building incremental apks.
+ * Android resources, assets, and `loadable_modules` are not sideloaded (they
+   remain in the apk), so builds & installs that modify any of these are not as
+   fast as those that modify only .java / .cc.
+ * Since files are sideloaded to `/data/local/tmp`, you need to use the wrapper
+   scripts to uninstall them fully. E.g.:
+   ```shell
+   out/Default/bin/chrome_public_apk uninstall
+   ```
+
+## The Code
+
+All incremental apks have the same classes.dex, which is built from:
+
+    //build/android/incremental_install:bootstrap_java
+
+They also have a transformed `AndroidManifest.xml`, which overrides the
+main application class and any instrumentation classes so that they instead
+point to `BootstrapApplication`. This is built by:
+
+    //build/android/incremental_install/generate_android_manifest.py
+
+Wrapper scripts and install logic are contained in:
+
+    //build/android/incremental_install/create_install_script.py
+    //build/android/incremental_install/installer.py
+
+Finally, GN logic for incremental apks is sprinkled throughout.
diff --git a/src/build/android/incremental_install/__init__.py b/src/build/android/incremental_install/__init__.py
new file mode 100644
index 0000000..50b23df
--- /dev/null
+++ b/src/build/android/incremental_install/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/incremental_install/generate_android_manifest.py b/src/build/android/incremental_install/generate_android_manifest.py
new file mode 100755
index 0000000..e069dab
--- /dev/null
+++ b/src/build/android/incremental_install/generate_android_manifest.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an incremental APK.
+
+Given the manifest file for the real APK, generates an AndroidManifest.xml with
+the application class changed to IncrementalApplication.
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp'))
+from util import build_utils
+from util import manifest_utils
+from util import resource_utils
+
+_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication'
+_META_DATA_APP_NAME = 'incremental-install-real-app'
+_DEFAULT_APPLICATION_CLASS = 'android.app.Application'
+_META_DATA_INSTRUMENTATION_NAMES = [
+    'incremental-install-real-instrumentation-0',
+    'incremental-install-real-instrumentation-1',
+]
+_INCREMENTAL_INSTRUMENTATION_CLASSES = [
+    'android.app.Instrumentation',
+    'org.chromium.incrementalinstall.SecondInstrumentation',
+]
+
+
+def _AddNamespace(name):
+  """Adds the android namespace prefix to the given identifier."""
+  return '{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, name)
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--src-manifest', required=True, help='The main manifest of the app')
+  parser.add_argument('--disable-isolated-processes',
+                      help='Changes all android:isolatedProcess to false. '
+                           'This is required on Android M+',
+                      action='store_true')
+  parser.add_argument(
+      '--out-apk', required=True, help='Path to output .ap_ file')
+  parser.add_argument(
+      '--in-apk', required=True, help='Path to non-incremental .ap_ file')
+  parser.add_argument(
+      '--aapt2-path', required=True, help='Path to the Android aapt tool')
+  parser.add_argument(
+      '--android-sdk-jars', help='GN List of resource apks to include.')
+
+  ret = parser.parse_args(build_utils.ExpandFileArgs(args))
+  ret.android_sdk_jars = build_utils.ParseGnList(ret.android_sdk_jars)
+  return ret
+
+
+def _CreateMetaData(parent, name, value):
+  meta_data_node = ElementTree.SubElement(parent, 'meta-data')
+  meta_data_node.set(_AddNamespace('name'), name)
+  meta_data_node.set(_AddNamespace('value'), value)
+
+
+def _ProcessManifest(path, arsc_package_name, disable_isolated_processes):
+  doc, manifest_node, app_node = manifest_utils.ParseManifest(path)
+
+  # Ensure the manifest package matches that of the apk's arsc package so
+  # that resource references resolve correctly. The actual manifest package
+  # name is set via --rename-manifest-package.
+  manifest_node.set('package', arsc_package_name)
+
+  # Pylint for some reason thinks app_node is an int.
+  # pylint: disable=no-member
+  real_app_class = app_node.get(_AddNamespace('name'),
+                                _DEFAULT_APPLICATION_CLASS)
+  app_node.set(_AddNamespace('name'), _INCREMENTAL_APP_NAME)
+  # pylint: enable=no-member
+  _CreateMetaData(app_node, _META_DATA_APP_NAME, real_app_class)
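+  # The application element now looks roughly like this (com.example.App is
+  # illustrative; the real value is whatever the original manifest declared):
+  #   <application android:name=
+  #       "org.chromium.incrementalinstall.BootstrapApplication">
+  #     <meta-data android:name="incremental-install-real-app"
+  #                android:value="com.example.App"/>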
+
+  # Seems to be a bug in ElementTree, as doc.find() doesn't work here.
+  instrumentation_nodes = doc.findall('instrumentation')
+  assert len(instrumentation_nodes) <= 2, (
+      'Need to update incremental install to support >2 <instrumentation> tags')
+  for i, instrumentation_node in enumerate(instrumentation_nodes):
+    real_instrumentation_class = instrumentation_node.get(_AddNamespace('name'))
+    instrumentation_node.set(_AddNamespace('name'),
+                             _INCREMENTAL_INSTRUMENTATION_CLASSES[i])
+    _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAMES[i],
+                    real_instrumentation_class)
+
+  ret = ElementTree.tostring(doc.getroot(), encoding='UTF-8')
+  # Disable check for page-aligned native libraries.
+  ret = ret.replace(b'extractNativeLibs="false"', b'extractNativeLibs="true"')
+  if disable_isolated_processes:
+    ret = ret.replace(b'isolatedProcess="true"', b'isolatedProcess="false"')
+  return ret
+
+
+def main(raw_args):
+  options = _ParseArgs(raw_args)
+
+  arsc_package, _ = resource_utils.ExtractArscPackage(options.aapt2_path,
+                                                      options.in_apk)
+  # Extract version from the compiled manifest since it might have been set
+  # via aapt, and not exist in the manifest's text form.
+  version_code, version_name, manifest_package = (
+      resource_utils.ExtractBinaryManifestValues(options.aapt2_path,
+                                                 options.in_apk))
+
+  new_manifest_data = _ProcessManifest(options.src_manifest, arsc_package,
+                                       options.disable_isolated_processes)
+  with tempfile.NamedTemporaryFile() as tmp_manifest, \
+      tempfile.NamedTemporaryFile() as tmp_apk:
+    tmp_manifest.write(new_manifest_data)
+    tmp_manifest.flush()
+    cmd = [
+        options.aapt2_path, 'link', '-o', tmp_apk.name, '--manifest',
+        tmp_manifest.name, '-I', options.in_apk, '--replace-version',
+        '--version-code', version_code, '--version-name', version_name,
+        '--rename-manifest-package', manifest_package, '--debug-mode'
+    ]
+    for j in options.android_sdk_jars:
+      cmd += ['-I', j]
+    subprocess.check_call(cmd)
+    with zipfile.ZipFile(options.out_apk, 'w') as z:
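+      # Keep only AndroidManifest.xml from the re-linked apk, then everything
+      # except AndroidManifest.xml from the original apk.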
+      path_transform = lambda p: None if p != 'AndroidManifest.xml' else p
+      build_utils.MergeZips(z, [tmp_apk.name], path_transform=path_transform)
+      path_transform = lambda p: None if p == 'AndroidManifest.xml' else p
+      build_utils.MergeZips(z, [options.in_apk], path_transform=path_transform)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/incremental_install/generate_android_manifest.pydeps b/src/build/android/incremental_install/generate_android_manifest.pydeps
new file mode 100644
index 0000000..568ea1e
--- /dev/null
+++ b/src/build/android/incremental_install/generate_android_manifest.pydeps
@@ -0,0 +1,29 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+../gyp/util/manifest_utils.py
+../gyp/util/resource_utils.py
+generate_android_manifest.py
diff --git a/src/build/android/incremental_install/installer.py b/src/build/android/incremental_install/installer.py
new file mode 100755
index 0000000..9625822
--- /dev/null
+++ b/src/build/android/incremental_install/installer.py
@@ -0,0 +1,372 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install *_incremental.apk targets as well as their dependent files."""
+
+import argparse
+import collections
+import functools
+import glob
+import json
+import logging
+import os
+import posixpath
+import shutil
+import sys
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_utils
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.utils import time_profile
+
+prev_sys_path = list(sys.path)
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+import dex
+from util import build_utils
+sys.path = prev_sys_path
+
+
+_R8_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'r8', 'lib',
+                        'r8.jar')
+
+
+def _DeviceCachePath(device):
+  file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+  return os.path.join(constants.GetOutDirectory(), file_name)
+
+
+def _Execute(concurrently, *funcs):
+  """Calls all functions in |funcs| concurrently or in sequence."""
+  timer = time_profile.TimeProfile()
+  if concurrently:
+    reraiser_thread.RunAsync(funcs)
+  else:
+    for f in funcs:
+      f()
+  timer.Stop(log=False)
+  return timer
+
+
+def _GetDeviceIncrementalDir(package):
+  """Returns the device path to put incremental files for the given package."""
+  return '/data/local/tmp/incremental-app-%s' % package
+
+
+def _IsStale(src_paths, dest):
+  """Returns if |dest| is older than any of |src_paths|, or missing."""
+  if not os.path.exists(dest):
+    return True
+  dest_time = os.path.getmtime(dest)
+  for path in src_paths:
+    if os.path.getmtime(path) > dest_time:
+      return True
+  return False
+
+
+def _AllocateDexShards(dex_files):
+  """Divides input dex files into buckets."""
+  # Goals:
+  # * Make shards small enough that they are fast to merge.
+  # * Minimize the number of shards so they load quickly on device.
+  # * Partition files into shards such that a change in one file results in only
+  #   one shard having to be re-created.
+  shards = collections.defaultdict(list)
+  # As of Oct 2019, 10 shards results in a min/max size of 582K/2.6M.
+  NUM_CORE_SHARDS = 10
+  # As of Oct 2019, 17 dex files are larger than 1M.
+  SHARD_THRESHOLD = 2**20
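+  # E.g. a 3MB obj/foo/bar.dex.jar (path illustrative) becomes its own shard
+  # named 'obj.foo.bar.dex.jar'; smaller files hash into shard0-shard9.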
+  for src_path in dex_files:
+    if os.path.getsize(src_path) >= SHARD_THRESHOLD:
+      # Use the path as the name rather than an incrementing number to ensure
+      # that it shards to the same name every time.
+      name = os.path.relpath(src_path, constants.GetOutDirectory()).replace(
+          os.sep, '.')
+      shards[name].append(src_path)
+    else:
+      name = 'shard{}.dex.jar'.format(hash(src_path) % NUM_CORE_SHARDS)
+      shards[name].append(src_path)
+  logging.info('Sharding %d dex files into %d buckets', len(dex_files),
+               len(shards))
+  return shards
+
+
+def _CreateDexFiles(shards, dex_staging_dir, min_api, use_concurrency):
+  """Creates dex files within |dex_staging_dir| defined by |shards|."""
+  tasks = []
+  for name, src_paths in shards.iteritems():
+    dest_path = os.path.join(dex_staging_dir, name)
+    if _IsStale(src_paths, dest_path):
+      tasks.append(
+          functools.partial(dex.MergeDexForIncrementalInstall, _R8_PATH,
+                            src_paths, dest_path, min_api))
+
+  # TODO(agrieve): It would be more performant to write a custom d8.jar
+  #     wrapper in java that would process these in bulk, rather than spinning
+  #     up a new process for each one.
+  _Execute(use_concurrency, *tasks)
+
+  # Remove any stale shards.
+  for name in os.listdir(dex_staging_dir):
+    if name not in shards:
+      os.unlink(os.path.join(dex_staging_dir, name))
+
+
+def Uninstall(device, package, enable_device_cache=False):
+  """Uninstalls and removes all incremental files for the given package."""
+  main_timer = time_profile.TimeProfile()
+  device.Uninstall(package)
+  if enable_device_cache:
+    # Uninstall is rare, so just wipe the cache in this case.
+    cache_path = _DeviceCachePath(device)
+    if os.path.exists(cache_path):
+      os.unlink(cache_path)
+  device.RunShellCommand(['rm', '-rf', _GetDeviceIncrementalDir(package)],
+                         check_return=True)
+  logging.info('Uninstall took %s seconds.', main_timer.GetDelta())
+
+
+def Install(device, install_json, apk=None, enable_device_cache=False,
+            use_concurrency=True, permissions=()):
+  """Installs the given incremental apk and all required supporting files.
+
+  Args:
+    device: A DeviceUtils instance (to install to).
+    install_json: Path to .json file or already parsed .json object.
+    apk: An existing ApkHelper instance for the apk (optional).
+    enable_device_cache: Whether to enable on-device caching of checksums.
+    use_concurrency: Whether to speed things up using multiple threads.
+    permissions: A list of the permissions to grant, or None to grant all
+                 non-denylisted permissions in the manifest.
+  """
+  if isinstance(install_json, basestring):
+    with open(install_json) as f:
+      install_dict = json.load(f)
+  else:
+    install_dict = install_json
+
+  main_timer = time_profile.TimeProfile()
+  install_timer = time_profile.TimeProfile()
+  push_native_timer = time_profile.TimeProfile()
+  merge_dex_timer = time_profile.TimeProfile()
+  push_dex_timer = time_profile.TimeProfile()
+
+  def fix_path(p):
+    return os.path.normpath(os.path.join(constants.GetOutDirectory(), p))
+
+  if not apk:
+    apk = apk_helper.ToHelper(fix_path(install_dict['apk_path']))
+  split_globs = [fix_path(p) for p in install_dict['split_globs']]
+  native_libs = [fix_path(p) for p in install_dict['native_libs']]
+  dex_files = [fix_path(p) for p in install_dict['dex_files']]
+  show_proguard_warning = install_dict.get('show_proguard_warning')
+
+  apk_package = apk.GetPackageName()
+  device_incremental_dir = _GetDeviceIncrementalDir(apk_package)
+  dex_staging_dir = os.path.join(constants.GetOutDirectory(),
+                                 'incremental-install',
+                                 install_dict['apk_path'])
+  device_dex_dir = posixpath.join(device_incremental_dir, 'dex')
+
+  # Install .apk(s) if any of them have changed.
+  def do_install():
+    install_timer.Start()
+    if split_globs:
+      splits = []
+      for split_glob in split_globs:
+        splits.extend((f for f in glob.glob(split_glob)))
+      device.InstallSplitApk(
+          apk,
+          splits,
+          allow_downgrade=True,
+          reinstall=True,
+          allow_cached_props=True,
+          permissions=permissions)
+    else:
+      device.Install(
+          apk, allow_downgrade=True, reinstall=True, permissions=permissions)
+    install_timer.Stop(log=False)
+
+  # Push .so and .dex files to the device (if they have changed).
+  def do_push_files():
+
+    def do_push_native():
+      push_native_timer.Start()
+      if native_libs:
+        with build_utils.TempDir() as temp_dir:
+          device_lib_dir = posixpath.join(device_incremental_dir, 'lib')
+          for path in native_libs:
+            # Note: Can't use symlinks as they don't work when
+            # "adb push parent_dir" is used (like we do here).
+            shutil.copy(path, os.path.join(temp_dir, os.path.basename(path)))
+          device.PushChangedFiles([(temp_dir, device_lib_dir)],
+                                  delete_device_stale=True)
+      push_native_timer.Stop(log=False)
+
+    def do_merge_dex():
+      merge_dex_timer.Start()
+      shards = _AllocateDexShards(dex_files)
+      build_utils.MakeDirectory(dex_staging_dir)
+      _CreateDexFiles(shards, dex_staging_dir, apk.GetMinSdkVersion(),
+                      use_concurrency)
+      merge_dex_timer.Stop(log=False)
+
+    def do_push_dex():
+      push_dex_timer.Start()
+      device.PushChangedFiles([(dex_staging_dir, device_dex_dir)],
+                              delete_device_stale=True)
+      push_dex_timer.Stop(log=False)
+
+    _Execute(use_concurrency, do_push_native, do_merge_dex)
+    do_push_dex()
+
+  def check_device_configured():
+    target_sdk_version = int(apk.GetTargetSdkVersion())
+    # Beta Q builds apply allowlist to targetSdk=28 as well.
+    if target_sdk_version >= 28 and device.build_version_sdk >= 28:
+      # In P, there are two settings:
+      #  * hidden_api_policy_p_apps
+      #  * hidden_api_policy_pre_p_apps
+      # In Q, there is just one:
+      #  * hidden_api_policy
+      if device.build_version_sdk == 28:
+        setting_name = 'hidden_api_policy_p_apps'
+      else:
+        setting_name = 'hidden_api_policy'
+      apis_allowed = ''.join(
+          device.RunShellCommand(['settings', 'get', 'global', setting_name],
+                                 check_return=True))
+      if apis_allowed.strip() not in '01':
+        msg = """\
+Cannot use incremental installs on Android P+ without first enabling access to
+non-SDK interfaces (https://developer.android.com/preview/non-sdk-q).
+
+To enable access:
+   adb -s {0} shell settings put global {1} 0
+To restore back to default:
+   adb -s {0} shell settings delete global {1}"""
+        raise Exception(msg.format(device.serial, setting_name))
+
+  cache_path = _DeviceCachePath(device)
+  def restore_cache():
+    if not enable_device_cache:
+      return
+    if os.path.exists(cache_path):
+      logging.info('Using device cache: %s', cache_path)
+      with open(cache_path) as f:
+        device.LoadCacheData(f.read())
+      # Delete the cached file so that any exceptions cause it to be cleared.
+      os.unlink(cache_path)
+    else:
+      logging.info('No device cache present: %s', cache_path)
+
+  def save_cache():
+    if not enable_device_cache:
+      return
+    with open(cache_path, 'w') as f:
+      f.write(device.DumpCacheData())
+      logging.info('Wrote device cache: %s', cache_path)
+
+  # Create 2 lock files:
+  # * install.lock tells the app to pause on start-up (until we release it).
+  # * firstrun.lock is used by the app to pause all secondary processes until
+  #   the primary process finishes loading the .dex / .so files.
+  def create_lock_files():
+    # Creates or zeros out lock files.
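+    # 'echo -n' writes nothing; redirecting its stdout and stderr creates
+    # (or truncates) both lock files in a single shell invocation.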
+    cmd = ('D="%s";'
+           'mkdir -p $D &&'
+           'echo -n >$D/install.lock 2>$D/firstrun.lock')
+    device.RunShellCommand(
+        cmd % device_incremental_dir, shell=True, check_return=True)
+
+  # The firstrun.lock is released by the app itself.
+  def release_installer_lock():
+    device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir,
+                           check_return=True, shell=True)
+
+  # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't
+  # been designed for multi-threading. Enabling only because this is a
+  # developer-only tool.
+  setup_timer = _Execute(use_concurrency, create_lock_files, restore_cache,
+                         check_device_configured)
+
+  _Execute(use_concurrency, do_install, do_push_files)
+
+  finalize_timer = _Execute(use_concurrency, release_installer_lock, save_cache)
+
+  logging.info(
+      'Install of %s took %s seconds (setup=%s, install=%s, lib_push=%s, '
+      'dex_merge=%s dex_push=%s, finalize=%s)', os.path.basename(apk.path),
+      main_timer.GetDelta(), setup_timer.GetDelta(), install_timer.GetDelta(),
+      push_native_timer.GetDelta(), merge_dex_timer.GetDelta(),
+      push_dex_timer.GetDelta(), finalize_timer.GetDelta())
+  if show_proguard_warning:
+    logging.warning('Target had proguard enabled, but incremental install uses '
+                    'non-proguarded .dex files. Performance characteristics '
+                    'may differ.')
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('json_path',
+                      help='The path to the generated incremental apk .json.')
+  parser.add_argument('-d', '--device', dest='device',
+                      help='Target device for apk to install on.')
+  parser.add_argument('--uninstall',
+                      action='store_true',
+                      default=False,
+                      help='Remove the app and all side-loaded files.')
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('--no-threading',
+                      action='store_false',
+                      default=True,
+                      dest='threading',
+                      help='Do not install and push concurrently')
+  parser.add_argument('--no-cache',
+                      action='store_false',
+                      default=True,
+                      dest='cache',
+                      help='Do not use cached information about what files are '
+                           'currently on the target device.')
+  parser.add_argument('-v',
+                      '--verbose',
+                      dest='verbose_count',
+                      default=0,
+                      action='count',
+                      help='Verbose level (multiple times for more)')
+
+  args = parser.parse_args()
+
+  run_tests_helper.SetLogLevel(args.verbose_count)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+  # Retries are annoying when commands fail for legitimate reasons. Might want
+  # to enable them if this is ever used on bots though.
+  device = device_utils.DeviceUtils.HealthyDevices(
+      device_arg=args.device,
+      default_retries=0,
+      enable_device_files_cache=True)[0]
+
+  if args.uninstall:
+    with open(args.json_path) as f:
+      install_dict = json.load(f)
+    apk = apk_helper.ToHelper(install_dict['apk_path'])
+    Uninstall(device, apk.GetPackageName(), enable_device_cache=args.cache)
+  else:
+    Install(device, args.json_path, enable_device_cache=args.cache,
+            use_concurrency=args.threading)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
new file mode 100644
index 0000000..f7003f2
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
@@ -0,0 +1,297 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Application;
+import android.app.Instrumentation;
+import android.content.Context;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.os.Bundle;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Application that replaces itself with another Application (as defined in
+ * an AndroidManifest.xml meta-data tag). It loads the other application only
+ * after side-loading its .so and .dex files from /data/local/tmp.
+ *
+ * This class is highly dependent on the private implementation details of
+ * Android's ActivityThread.java. However, it has been tested to work with
+ * JellyBean through Marshmallow.
+ */
+public final class BootstrapApplication extends Application {
+    private static final String TAG = "incrementalinstall";
+    private static final String MANAGED_DIR_PREFIX = "/data/local/tmp/incremental-app-";
+    private static final String REAL_APP_META_DATA_NAME = "incremental-install-real-app";
+    private static final String REAL_INSTRUMENTATION_META_DATA_NAME0 =
+            "incremental-install-real-instrumentation-0";
+    private static final String REAL_INSTRUMENTATION_META_DATA_NAME1 =
+            "incremental-install-real-instrumentation-1";
+
+    private ClassLoaderPatcher mClassLoaderPatcher;
+    private Application mRealApplication;
+    private Instrumentation mOrigInstrumentation;
+    private Instrumentation mRealInstrumentation;
+    private Object mStashedProviderList;
+    private Object mActivityThread;
+    public static DexFile[] sIncrementalDexFiles; // Needed by junit test runner.
+
+    @Override
+    protected void attachBaseContext(Context context) {
+        super.attachBaseContext(context);
+        try {
+            mActivityThread = Reflect.invokeMethod(Class.forName("android.app.ActivityThread"),
+                    "currentActivityThread");
+            mClassLoaderPatcher = new ClassLoaderPatcher(context);
+
+            mOrigInstrumentation =
+                    (Instrumentation) Reflect.getField(mActivityThread, "mInstrumentation");
+            Context instContext = mOrigInstrumentation.getContext();
+            if (instContext == null) {
+                instContext = context;
+            }
+
+            // When running with an instrumentation that lives in a different package from the
+            // application, we must load the dex files and native libraries from both packages.
+            // This logic likely won't work when the instrumentation is incremental, but the app is
+            // non-incremental. This configuration isn't used right now though.
+            String appPackageName = getPackageName();
+            String instPackageName = instContext.getPackageName();
+            boolean instPackageNameDiffers = !appPackageName.equals(instPackageName);
+            Log.i(TAG, "App PackageName: " + appPackageName);
+            if (instPackageNameDiffers) {
+                Log.i(TAG, "Inst PackageName: " + instPackageName);
+            }
+
+            File appIncrementalRootDir = new File(MANAGED_DIR_PREFIX + appPackageName);
+            File appLibDir = new File(appIncrementalRootDir, "lib");
+            File appDexDir = new File(appIncrementalRootDir, "dex");
+            File appInstallLockFile = new File(appIncrementalRootDir, "install.lock");
+            File appFirstRunLockFile = new File(appIncrementalRootDir, "firstrun.lock");
+            File instIncrementalRootDir = new File(MANAGED_DIR_PREFIX + instPackageName);
+            File instLibDir = new File(instIncrementalRootDir, "lib");
+            File instDexDir = new File(instIncrementalRootDir, "dex");
+            File instInstallLockFile = new File(instIncrementalRootDir, "install.lock");
+            File instFirstRunLockFile = new File(instIncrementalRootDir, "firstrun.lock");
+
+            boolean isFirstRun = LockFile.installerLockExists(appFirstRunLockFile)
+                    || (instPackageNameDiffers
+                               && LockFile.installerLockExists(instFirstRunLockFile));
+            if (isFirstRun) {
+                if (mClassLoaderPatcher.mIsPrimaryProcess) {
+                    // Wait for incremental_install.py to finish.
+                    LockFile.waitForInstallerLock(appInstallLockFile, 30 * 1000);
+                    LockFile.waitForInstallerLock(instInstallLockFile, 30 * 1000);
+                } else {
+                    // Wait for the browser process to create the optimized dex files
+                    // and copy the library files.
+                    LockFile.waitForInstallerLock(appFirstRunLockFile, 60 * 1000);
+                    LockFile.waitForInstallerLock(instFirstRunLockFile, 60 * 1000);
+                }
+            }
+
+            mClassLoaderPatcher.importNativeLibs(instLibDir);
+            sIncrementalDexFiles = mClassLoaderPatcher.loadDexFiles(instDexDir, instPackageName);
+            if (instPackageNameDiffers) {
+                mClassLoaderPatcher.importNativeLibs(appLibDir);
+                mClassLoaderPatcher.loadDexFiles(appDexDir, appPackageName);
+            }
+
+            if (isFirstRun && mClassLoaderPatcher.mIsPrimaryProcess) {
+                LockFile.clearInstallerLock(appFirstRunLockFile);
+                if (instPackageNameDiffers) {
+                    LockFile.clearInstallerLock(instFirstRunLockFile);
+                }
+            }
+
+            // mInstrumentationAppDir is one of a set of fields that are initialized only when
+            // instrumentation is active.
+            if (Reflect.getField(mActivityThread, "mInstrumentationAppDir") != null) {
+                String metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME0;
+                if (mOrigInstrumentation instanceof SecondInstrumentation) {
+                    metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME1;
+                }
+                mRealInstrumentation =
+                        initInstrumentation(getClassNameFromMetadata(metaDataName, instContext));
+            } else {
+                Log.i(TAG, "No instrumentation active.");
+            }
+
+            // Even when instrumentation is not enabled, ActivityThread uses a default
+            // Instrumentation instance internally. We hook it here in order to hook into the
+            // call to Instrumentation.onCreate().
+            BootstrapInstrumentation bootstrapInstrumentation = new BootstrapInstrumentation(this);
+            populateInstrumentationFields(bootstrapInstrumentation);
+            Reflect.setField(mActivityThread, "mInstrumentation", bootstrapInstrumentation);
+
+            // attachBaseContext() is called from ActivityThread#handleBindApplication() and
+            // Application#mApplication is changed right after we return. Thus, we cannot swap
+            // the Application instances until onCreate() is called.
+            String realApplicationName = getClassNameFromMetadata(REAL_APP_META_DATA_NAME, context);
+            Log.i(TAG, "Instantiating " + realApplicationName);
+            Instrumentation anyInstrumentation =
+                    mRealInstrumentation != null ? mRealInstrumentation : mOrigInstrumentation;
+            mRealApplication = anyInstrumentation.newApplication(
+                    getClassLoader(), realApplicationName, context);
+
+            // Between attachBaseContext() and onCreate(), ActivityThread tries to instantiate
+            // all ContentProviders. The ContentProviders break without the correct Application
+            // class being installed, so temporarily pretend there are no providers, and then
+            // instantiate them explicitly within onCreate().
+            disableContentProviders();
+            Log.i(TAG, "Waiting for Instrumentation.onCreate");
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Returns the fully-qualified class name for the given key, stored in a
+     * &lt;meta-data&gt; tag within the manifest.
+     */
+    private static String getClassNameFromMetadata(String key, Context context)
+            throws NameNotFoundException {
+        String pkgName = context.getPackageName();
+        ApplicationInfo appInfo = context.getPackageManager().getApplicationInfo(pkgName,
+                PackageManager.GET_META_DATA);
+        String value = appInfo.metaData.getString(key);
+        if (value != null && !value.contains(".")) {
+            value = pkgName + "." + value;
+        }
+        return value;
+    }
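+
+    // For reference, the metadata read by getClassNameFromMetadata() is
+    // declared in the manifest roughly as follows (illustrative value; the
+    // build system injects the real entries):
+    //   <meta-data android:name="incremental-install-real-app"
+    //              android:value="MyRealApplication" />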
+
+    /**
+     * Instantiates and initializes mRealInstrumentation (the real Instrumentation class).
+     */
+    private Instrumentation initInstrumentation(String realInstrumentationName)
+            throws ReflectiveOperationException {
+        if (realInstrumentationName == null) {
+            // This is the case when an incremental app is used as a target for an instrumentation
+            // test. In this case, ActivityThread can instantiate the proper class just fine since
+            // it exists within the test apk (as opposed to the incremental apk-under-test).
+            Log.i(TAG, "Running with external instrumentation");
+            return null;
+        }
+        // For unit tests, the instrumentation class is replaced in the manifest by a build step
+        // because ActivityThread tries to instantiate it before we get a chance to load the
+        // incremental dex files.
+        Log.i(TAG, "Instantiating instrumentation " + realInstrumentationName);
+        Instrumentation ret =
+                (Instrumentation) Reflect.newInstance(Class.forName(realInstrumentationName));
+        populateInstrumentationFields(ret);
+        return ret;
+    }
+
+    /**
+     * Sets important fields on a newly created Instrumentation object by copying them from the
+     * original Instrumentation instance.
+     */
+    private void populateInstrumentationFields(Instrumentation target)
+            throws ReflectiveOperationException {
+        // Initialize the fields that are set by Instrumentation.init().
+        String[] initFields = {"mAppContext", "mComponent", "mInstrContext", "mMessageQueue",
+                "mThread", "mUiAutomationConnection", "mWatcher"};
+        for (String fieldName : initFields) {
+            Reflect.setField(target, fieldName, Reflect.getField(mOrigInstrumentation, fieldName));
+        }
+    }
+
+    /**
+     * Called by BootstrapInstrumentation from Instrumentation.onCreate().
+     * This happens regardless of whether or not instrumentation is enabled.
+     */
+    void onInstrumentationCreate(Bundle arguments) {
+        Log.i(TAG, "Instrumentation.onCreate() called. Swapping references.");
+        try {
+            swapApplicationReferences();
+            enableContentProviders();
+            if (mRealInstrumentation != null) {
+                Reflect.setField(mActivityThread, "mInstrumentation", mRealInstrumentation);
+                mRealInstrumentation.onCreate(arguments);
+            }
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    @Override
+    public void onCreate() {
+        super.onCreate();
+        try {
+            Log.i(TAG, "Application.onCreate() called.");
+            mRealApplication.onCreate();
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Nulls out ActivityThread.mBoundApplication.providers.
+     */
+    private void disableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        mStashedProviderList = Reflect.getField(data, "providers");
+        Reflect.setField(data, "providers", null);
+    }
+
+    /**
+     * Restores the value of ActivityThread.mBoundApplication.providers, and invokes
+     * ActivityThread#installContentProviders().
+     */
+    private void enableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        Reflect.setField(data, "providers", mStashedProviderList);
+        if (mStashedProviderList != null && mClassLoaderPatcher.mIsPrimaryProcess) {
+            Log.i(TAG, "Instantiating content providers");
+            Reflect.invokeMethod(mActivityThread, "installContentProviders", mRealApplication,
+                    mStashedProviderList);
+        }
+        mStashedProviderList = null;
+    }
+
+    /**
+     * Changes all fields within framework classes that have stored a reference to this
+     * BootstrapApplication to instead store references to mRealApplication.
+     */
+    @SuppressWarnings("unchecked")
+    private void swapApplicationReferences() throws ReflectiveOperationException {
+        if (Reflect.getField(mActivityThread, "mInitialApplication") == this) {
+            Reflect.setField(mActivityThread, "mInitialApplication", mRealApplication);
+        }
+
+        List<Application> allApplications =
+                (List<Application>) Reflect.getField(mActivityThread, "mAllApplications");
+        for (int i = 0; i < allApplications.size(); i++) {
+            if (allApplications.get(i) == this) {
+                allApplications.set(i, mRealApplication);
+            }
+        }
+
+        // Contains a reference to BootstrapApplication and will cause BroadcastReceivers to fail
+        // if not replaced.
+        Context contextImpl = mRealApplication.getBaseContext();
+        Reflect.setField(contextImpl, "mOuterContext", mRealApplication);
+
+        for (String fieldName : new String[] {"mPackages", "mResourcePackages"}) {
+            Map<String, WeakReference<?>> packageMap =
+                    (Map<String, WeakReference<?>>) Reflect.getField(mActivityThread, fieldName);
+            for (Map.Entry<String, WeakReference<?>> entry : packageMap.entrySet()) {
+                Object loadedApk = entry.getValue().get();
+                if (loadedApk != null && Reflect.getField(loadedApk, "mApplication") == this) {
+                    Reflect.setField(loadedApk, "mApplication", mRealApplication);
+                }
+            }
+        }
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
new file mode 100644
index 0000000..f197406
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
@@ -0,0 +1,25 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+import android.os.Bundle;
+
+/**
+ * Notifies BootstrapApplication of the call to Instrumentation.onCreate().
+ */
+public final class BootstrapInstrumentation extends Instrumentation {
+    private final BootstrapApplication mApp;
+
+    BootstrapInstrumentation(BootstrapApplication app) {
+        mApp = app;
+    }
+
+    @Override
+    public void onCreate(Bundle arguments) {
+        super.onCreate(arguments);
+        mApp.onInstrumentationCreate(arguments);
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
new file mode 100644
index 0000000..b6d7522
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
@@ -0,0 +1,312 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.os.Build;
+import android.os.Process;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Provides the ability to add native libraries and .dex files to an existing class loader.
+ * Tested with Jelly Bean MR2 through Marshmallow.
+ */
+final class ClassLoaderPatcher {
+    private static final String TAG = "incrementalinstall";
+    private final File mAppFilesSubDir;
+    private final ClassLoader mClassLoader;
+    private final Object mLibcoreOs;
+    private final int mProcessUid;
+    final boolean mIsPrimaryProcess;
+
+    ClassLoaderPatcher(Context context) throws ReflectiveOperationException {
+        mAppFilesSubDir =
+                new File(context.getApplicationInfo().dataDir, "incremental-install-files");
+        mClassLoader = context.getClassLoader();
+        mLibcoreOs = Reflect.getField(Class.forName("libcore.io.Libcore"), "os");
+        mProcessUid = Process.myUid();
+        mIsPrimaryProcess = context.getApplicationInfo().uid == mProcessUid;
+        Log.i(TAG, "uid=" + mProcessUid + " (isPrimary=" + mIsPrimaryProcess + ")");
+    }
+
+    /**
+     * Loads all dex files within |dexDir| into the app's ClassLoader.
+     */
+    @SuppressLint({
+            "SetWorldReadable",
+            "SetWorldWritable",
+    })
+    DexFile[] loadDexFiles(File dexDir, String packageName)
+            throws ReflectiveOperationException, IOException {
+        Log.i(TAG, "Installing dex files from: " + dexDir);
+
+        File optimizedDir = null;
+        boolean isAtLeastOreo = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
+
+        if (isAtLeastOreo) {
+            // In O, optimizedDirectory is ignored, and the files are always put in an "oat"
+            // directory that is a sibling to the dex files themselves. SELinux policies
+            // prevent using odex files from /data/local/tmp, so we must first copy them
+            // into the app's data directory in order to get the odex files to live there.
+            // Use a package-name subdirectory to prevent name collisions when apk-under-test is
+            // used.
+            File newDexDir = new File(mAppFilesSubDir, packageName + "-dexes");
+            if (mIsPrimaryProcess) {
+                safeCopyAllFiles(dexDir, newDexDir);
+            }
+            dexDir = newDexDir;
+        } else {
+            // The optimized dex files will be owned by this process' user.
+            // Store them within the app's data dir rather than on /data/local/tmp
+            // so that they are still deleted (by the OS) when we uninstall
+            // (even on a non-rooted device).
+            File incrementalDexesDir = new File(mAppFilesSubDir, "optimized-dexes");
+            File isolatedDexesDir = new File(mAppFilesSubDir, "isolated-dexes");
+
+            if (mIsPrimaryProcess) {
+                ensureAppFilesSubDirExists();
+                // Allows isolated processes to access the same files.
+                incrementalDexesDir.mkdir();
+                incrementalDexesDir.setReadable(true, false);
+                incrementalDexesDir.setExecutable(true, false);
+                // Create a directory for isolated processes to create directories in.
+                isolatedDexesDir.mkdir();
+                isolatedDexesDir.setWritable(true, false);
+                isolatedDexesDir.setExecutable(true, false);
+
+                optimizedDir = incrementalDexesDir;
+            } else {
+                // There is a UID check of the directory in dalvik.system.DexFile():
+                // https://android.googlesource.com/platform/libcore/+/45e0260/dalvik/src/main/java/dalvik/system/DexFile.java#101
+                // Rather than have each isolated process run DexOpt though, we use
+                // symlinks within the directory to point at the browser process'
+                // optimized dex files.
+                optimizedDir = new File(isolatedDexesDir, "isolated-" + mProcessUid);
+                optimizedDir.mkdir();
+                // Always wipe it out and re-create for simplicity.
+                Log.i(TAG, "Creating dex file symlinks for isolated process");
+                for (File f : optimizedDir.listFiles()) {
+                    f.delete();
+                }
+                for (File f : incrementalDexesDir.listFiles()) {
+                    String to = "../../" + incrementalDexesDir.getName() + "/" + f.getName();
+                    File from = new File(optimizedDir, f.getName());
+                    createSymlink(to, from);
+                }
+            }
+            Log.i(TAG, "Code cache dir: " + optimizedDir);
+        }
+
+        // Ignore "oat" directory.
+        // Also ignore files that sometimes show up (e.g. .jar.arm.flock).
+        File[] dexFilesArr = dexDir.listFiles(f -> f.getName().endsWith(".jar"));
+        if (dexFilesArr == null) {
+            throw new FileNotFoundException("Dex dir does not exist: " + dexDir);
+        }
+
+        Log.i(TAG, "Loading " + dexFilesArr.length + " dex files");
+
+        Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+        Object[] dexElements = (Object[]) Reflect.getField(dexPathList, "dexElements");
+        dexElements = addDexElements(dexFilesArr, optimizedDir, dexElements);
+        Reflect.setField(dexPathList, "dexElements", dexElements);
+
+        // Return the list of new DexFile instances for the .jars in dexPathList.
+        DexFile[] ret = new DexFile[dexFilesArr.length];
+        int startIndex = dexElements.length - dexFilesArr.length;
+        for (int i = 0; i < ret.length; ++i) {
+            ret[i] = (DexFile) Reflect.getField(dexElements[startIndex + i], "dexFile");
+        }
+        return ret;
+    }
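+
+    // For reference: the net effect of loadDexFiles() is as if the app's
+    // ClassLoader had been built with the extra .jar files appended to its
+    // dexPath (e.g. a PathClassLoader over "base.apk:<dexDir>/foo.jar"),
+    // except that the existing ClassLoader is patched in place via reflection.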
+
+    /**
+     * Sets up all libraries within |libDir| to be loadable by System.loadLibrary().
+     */
+    @SuppressLint("SetWorldReadable")
+    void importNativeLibs(File libDir) throws ReflectiveOperationException, IOException {
+        Log.i(TAG, "Importing native libraries from: " + libDir);
+        if (!libDir.exists()) {
+            Log.i(TAG, "No native libs exist.");
+            return;
+        }
+        // The library copying is not necessary on older devices, but we do it anyway to
+        // simplify things (it's fast compared to dexing).
+        // https://code.google.com/p/android/issues/detail?id=79480
+        File localLibsDir = new File(mAppFilesSubDir, "lib");
+        safeCopyAllFiles(libDir, localLibsDir);
+        addNativeLibrarySearchPath(localLibsDir);
+    }
+
+    @SuppressLint("SetWorldReadable")
+    private void safeCopyAllFiles(File srcDir, File dstDir) throws IOException {
+        File lockFile = new File(mAppFilesSubDir, dstDir.getName() + ".lock");
+        if (mIsPrimaryProcess) {
+            ensureAppFilesSubDirExists();
+            LockFile lock = LockFile.acquireRuntimeLock(lockFile);
+            if (lock == null) {
+                LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+            } else {
+                try {
+                    dstDir.mkdir();
+                    dstDir.setReadable(true, false);
+                    dstDir.setExecutable(true, false);
+                    copyChangedFiles(srcDir, dstDir);
+                } finally {
+                    lock.release();
+                }
+            }
+        } else {
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                // TODO: Work around this issue by using APK splits to install each dex / lib.
+                throw new RuntimeException("Incremental install does not work on Android M+ "
+                        + "with isolated processes. Build system should have removed this. "
+                        + "Please file a bug.");
+            }
+            // Other processes wait for the primary process to finish copying.
+            LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private void addNativeLibrarySearchPath(File nativeLibDir) throws ReflectiveOperationException {
+        Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+        Object currentDirs = Reflect.getField(dexPathList, "nativeLibraryDirectories");
+        File[] newDirs = new File[] { nativeLibDir };
+        // Switched from an array to an ArrayList in Lollipop.
+        if (currentDirs instanceof List) {
+            List<File> dirsAsList = (List<File>) currentDirs;
+            dirsAsList.add(0, nativeLibDir);
+        } else {
+            File[] dirsAsArray = (File[]) currentDirs;
+            Reflect.setField(dexPathList, "nativeLibraryDirectories",
+                    Reflect.concatArrays(newDirs, newDirs, dirsAsArray));
+        }
+
+        Object[] nativeLibraryPathElements;
+        try {
+            nativeLibraryPathElements =
+                    (Object[]) Reflect.getField(dexPathList, "nativeLibraryPathElements");
+        } catch (NoSuchFieldException e) {
+            // This field doesn't exist pre-M.
+            return;
+        }
+        Object[] additionalElements = makeNativePathElements(newDirs);
+        Reflect.setField(dexPathList, "nativeLibraryPathElements",
+                Reflect.concatArrays(nativeLibraryPathElements, additionalElements,
+                        nativeLibraryPathElements));
+    }
+
+    private static void copyChangedFiles(File srcDir, File dstDir) throws IOException {
+        int numUpdated = 0;
+        File[] srcFiles = srcDir.listFiles();
+        for (File f : srcFiles) {
+            // Note: Tried using hardlinks, but resulted in EACCES exceptions.
+            File dest = new File(dstDir, f.getName());
+            if (copyIfModified(f, dest)) {
+                numUpdated++;
+            }
+        }
+        // Delete stale files.
+        int numDeleted = 0;
+        for (File f : dstDir.listFiles()) {
+            File src = new File(srcDir, f.getName());
+            if (!src.exists()) {
+                numDeleted++;
+                f.delete();
+            }
+        }
+        String msg = String.format(Locale.US,
+                "copyChangedFiles: %d of %d updated. %d stale files removed.", numUpdated,
+                srcFiles.length, numDeleted);
+        Log.i(TAG, msg);
+    }
+
+    @SuppressLint("SetWorldReadable")
+    private static boolean copyIfModified(File src, File dest) throws IOException {
+        long lastModified = src.lastModified();
+        if (dest.exists() && dest.lastModified() == lastModified) {
+            return false;
+        }
+        Log.i(TAG, "Copying " + src + " -> " + dest);
+        FileInputStream istream = new FileInputStream(src);
+        FileOutputStream ostream = new FileOutputStream(dest);
+        ostream.getChannel().transferFrom(istream.getChannel(), 0, istream.getChannel().size());
+        istream.close();
+        ostream.close();
+        dest.setReadable(true, false);
+        dest.setExecutable(true, false);
+        dest.setLastModified(lastModified);
+        return true;
+    }
+
+    private void ensureAppFilesSubDirExists() {
+        mAppFilesSubDir.mkdir();
+        mAppFilesSubDir.setExecutable(true, false);
+    }
+
+    private void createSymlink(String to, File from) throws ReflectiveOperationException {
+        Reflect.invokeMethod(mLibcoreOs, "symlink", to, from.getAbsolutePath());
+    }
+
+    private static Object[] makeNativePathElements(File[] paths)
+            throws ReflectiveOperationException {
+        Object[] entries = new Object[paths.length];
+        if (Build.VERSION.SDK_INT >= 26) {
+            Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$NativeLibraryElement");
+            for (int i = 0; i < paths.length; ++i) {
+                entries[i] = Reflect.newInstance(entryClazz, paths[i]);
+            }
+        } else {
+            Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+            for (int i = 0; i < paths.length; ++i) {
+                entries[i] = Reflect.newInstance(entryClazz, paths[i], true, null, null);
+            }
+        }
+        return entries;
+    }
+
+    private Object[] addDexElements(File[] files, File optimizedDirectory, Object[] curDexElements)
+            throws ReflectiveOperationException {
+        Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+        Class<?> clazz = Class.forName("dalvik.system.DexPathList");
+        Object[] ret =
+                Reflect.concatArrays(curDexElements, curDexElements, new Object[files.length]);
+        File emptyDir = new File("");
+        for (int i = 0; i < files.length; ++i) {
+            File file = files[i];
+            Object dexFile;
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+                // loadDexFile requires that ret contain all previously added elements.
+                dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory,
+                                               mClassLoader, ret);
+            } else {
+                dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory);
+            }
+            Object dexElement;
+            if (Build.VERSION.SDK_INT >= 26) {
+                dexElement = Reflect.newInstance(entryClazz, dexFile, file);
+            } else {
+                dexElement = Reflect.newInstance(entryClazz, emptyDir, false, file, dexFile);
+            }
+            ret[curDexElements.length + i] = dexElement;
+        }
+        return ret;
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
new file mode 100644
index 0000000..19d1f76
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
@@ -0,0 +1,129 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileLock;
+import java.util.concurrent.Callable;
+
+/**
+ * Helpers for dealing with .lock files used during install / first run.
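+ *
+ * <p>Protocol sketch, as implemented below: for installer locks, a missing or
+ * zero-length file means "locked", and the holder releases it by writing a
+ * byte rather than deleting it (files under /data/local/tmp cannot be deleted
+ * on M+). Runtime locks are ordinary FileLocks whose presence other processes
+ * poll for.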
+ */
+final class LockFile {
+    private static final String TAG = "incrementalinstall";
+
+    private final File mFile;
+    private final FileOutputStream mOutputStream;
+    private final FileLock mFileLock;
+
+    private LockFile(File file, FileOutputStream outputStream, FileLock fileLock) {
+        mFile = file;
+        mOutputStream = outputStream;
+        mFileLock = fileLock;
+    }
+
+    /**
+     * Clears the lock file by writing to it (making it non-zero in length).
+     */
+    static void clearInstallerLock(File lockFile) throws IOException {
+        Log.i(TAG, "Clearing " + lockFile);
+        // On Android M+, we can't delete files in /data/local/tmp, so we write to it instead.
+        FileOutputStream os = new FileOutputStream(lockFile);
+        os.write(1);
+        os.close();
+    }
+
+    /**
+     * Waits for the given file to be non-zero in length.
+     */
+    static void waitForInstallerLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !installerLockExists(file);
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Polls |func| until it returns true, throwing if |timeoutMs| elapses first.
+     */
+    private static void pollingWait(Callable<Boolean> func, File file, long timeoutMs) {
+        long pollIntervalMs = 200;
+        for (int i = 0; i < timeoutMs / pollIntervalMs; i++) {
+            try {
+                if (func.call()) {
+                    if (i > 0) {
+                        Log.i(TAG, "Finished waiting on lock file: " + file);
+                    }
+                    return;
+                } else if (i == 0) {
+                    Log.i(TAG, "Waiting on lock file: " + file);
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            try {
+                Thread.sleep(pollIntervalMs);
+            } catch (InterruptedException e) {
+                // Should never happen.
+            }
+        }
+        throw new RuntimeException("Timed out waiting for lock file: " + file);
+    }
+
+    /**
+     * Returns whether the given lock file is missing or is in the locked state.
+     */
+    static boolean installerLockExists(File file) {
+        return !file.exists() || file.length() == 0;
+    }
+
+    /**
+     * Attempts to acquire a lock for the given file.
+     * @return Returns the FileLock if it was acquired, or null otherwise.
+     */
+    static LockFile acquireRuntimeLock(File file) {
+        try {
+            FileOutputStream outputStream = new FileOutputStream(file);
+            FileLock lock = outputStream.getChannel().tryLock();
+            if (lock != null) {
+                Log.i(TAG, "Created lock file: " + file);
+                return new LockFile(file, outputStream, lock);
+            }
+            outputStream.close();
+        } catch (IOException e) {
+            // Do nothing. We didn't get the lock.
+            Log.w(TAG, "Exception trying to acquire lock " + file, e);
+        }
+        return null;
+    }
+
+    /**
+     * Waits for the given file to not exist.
+     */
+    static void waitForRuntimeLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !file.exists();
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Releases and deletes the lock file.
+     */
+    void release() throws IOException {
+        Log.i(TAG, "Deleting lock file: " + mFile);
+        mFileLock.release();
+        mOutputStream.close();
+        if (!mFile.delete()) {
+            throw new IOException("Failed to delete lock file: " + mFile);
+        }
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
new file mode 100644
index 0000000..c64dc1e
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
@@ -0,0 +1,142 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Reflection helper methods.
+ */
+final class Reflect {
+    /**
+     * Sets the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to set.
+     * @param name The name of the field to set.
+     * @param value The new value for the field.
+     */
+    static void setField(Object instance, String name, Object value)
+            throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        field.set(instance, value);
+    }
+
+    /**
+     * Retrieves the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to read.
+     * @param name The name of the field to read.
+     * @return The field's value. Primitive values are returned as their boxed
+     *         type.
+     */
+    static Object getField(Object instance, String name) throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        return field.get(instance);
+    }
+
+    /**
+     * Concatenates two arrays into a new array. The arrays must be of the same
+     * type.
+     */
+    static Object[] concatArrays(Object[] arrType, Object[] left, Object[] right) {
+        Object[] result = (Object[]) Array.newInstance(
+                arrType.getClass().getComponentType(), left.length + right.length);
+        System.arraycopy(left, 0, result, 0, left.length);
+        System.arraycopy(right, 0, result, left.length, right.length);
+        return result;
+    }
+
+    /**
+     * Invokes a method with zero or more parameters. For static methods, use the Class as the
+     * instance.
+     */
+    static Object invokeMethod(Object instance, String name, Object... params)
+            throws ReflectiveOperationException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+        Method method = findMethod(clazz, name, params);
+        method.setAccessible(true);
+        return method.invoke(instance, params);
+    }
+
+    /**
+     * Calls a constructor with zero or more parameters.
+     */
+    static Object newInstance(Class<?> clazz, Object... params)
+            throws ReflectiveOperationException {
+        Constructor<?> constructor = findConstructor(clazz, params);
+        constructor.setAccessible(true);
+        return constructor.newInstance(params);
+    }
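+
+    // Illustrative usage, mirroring the callers in this package:
+    //   Object thread = Reflect.invokeMethod(
+    //           Class.forName("android.app.ActivityThread"), "currentActivityThread");
+    //   Reflect.setField(thread, "mInstrumentation", stubInstrumentation);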
+
+    private static Field findField(Object instance, String name) throws NoSuchFieldException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+        for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
+            try {
+                return c.getDeclaredField(name);
+            } catch (NoSuchFieldException e) {
+                // Not declared here; keep looking up the superclass chain.
+            }
+        }
+        // Report against the original class rather than instance.getClass(),
+        // which would be java.lang.Class for static lookups.
+        throw new NoSuchFieldException("Field " + name + " not found in " + clazz);
+    }
+
+    private static Method findMethod(Class<?> clazz, String name, Object... params)
+            throws NoSuchMethodException {
+        for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
+            for (Method method : c.getDeclaredMethods()) {
+                if (method.getName().equals(name)
+                        && areParametersCompatible(method.getParameterTypes(), params)) {
+                    return method;
+                }
+            }
+        }
+        // Report against the original class; the loop variable is null here.
+        throw new NoSuchMethodException("Method " + name + " with parameters "
+                + Arrays.asList(params) + " not found in " + clazz);
+    }
+
+    private static Constructor<?> findConstructor(Class<?> clazz, Object... params)
+            throws NoSuchMethodException {
+        for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
+            if (areParametersCompatible(constructor.getParameterTypes(), params)) {
+                return constructor;
+            }
+        }
+        throw new NoSuchMethodException("Constructor with parameters " + Arrays.asList(params)
+                + " not found in " + clazz);
+    }
+
+    private static boolean areParametersCompatible(Class<?>[] paramTypes, Object... params) {
+        if (params.length != paramTypes.length) {
+            return false;
+        }
+        for (int i = 0; i < params.length; i++) {
+            if (!isAssignableFrom(paramTypes[i], params[i])) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private static boolean isAssignableFrom(Class<?> left, Object right) {
+        if (right == null) {
+            return !left.isPrimitive();
+        }
+        Class<?> rightClazz = right.getClass();
+        if (left.isPrimitive()) {
+            // TODO(agrieve): Fill in the rest as needed.
+            return left == boolean.class && rightClazz == Boolean.class
+                   || left == int.class && rightClazz == Integer.class;
+        }
+        return left.isAssignableFrom(rightClazz);
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
new file mode 100644
index 0000000..3e0df05
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
@@ -0,0 +1,12 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+
+/**
+ * Exists to support an app having multiple instrumentations.
+ */
+public final class SecondInstrumentation extends Instrumentation {}
diff --git a/src/build/android/incremental_install/write_installer_json.py b/src/build/android/incremental_install/write_installer_json.py
new file mode 100755
index 0000000..cf1d2d4
--- /dev/null
+++ b/src/build/android/incremental_install/write_installer_json.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a .json file with the per-apk details for an incremental install."""
+
+import argparse
+import json
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--output-path',
+                      help='Output path for .json file.',
+                      required=True)
+  parser.add_argument('--apk-path',
+                      help='Path to .apk relative to output directory.',
+                      required=True)
+  parser.add_argument('--split',
+                      action='append',
+                      dest='split_globs',
+                      default=[],
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument(
+      '--native-libs',
+      action='append',
+      help='GN-list of paths to native libraries relative to '
+      'output directory. Can be repeated.')
+  parser.add_argument(
+      '--dex-files', help='GN-list of dex paths relative to output directory.')
+  parser.add_argument('--show-proguard-warning',
+                      action='store_true',
+                      default=False,
+                      help='Print a warning about proguard being disabled')
+
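+  # GN-list arguments arrive in GN's list syntax, e.g. (illustrative paths):
+  #   --dex-files='["obj/foo.dex", "obj/bar.dex"]'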
+  options = parser.parse_args(args)
+  options.dex_files = build_utils.ParseGnList(options.dex_files)
+  options.native_libs = build_utils.ParseGnList(options.native_libs)
+  return options
+
+
+def main(args):
+  options = _ParseArgs(args)
+
+  data = {
+      'apk_path': options.apk_path,
+      'native_libs': options.native_libs,
+      'dex_files': options.dex_files,
+      'show_proguard_warning': options.show_proguard_warning,
+      'split_globs': options.split_globs,
+  }
+
+  with build_utils.AtomicOutput(options.output_path, mode='w+') as f:
+    json.dump(data, f, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/incremental_install/write_installer_json.pydeps b/src/build/android/incremental_install/write_installer_json.pydeps
new file mode 100644
index 0000000..11a263f
--- /dev/null
+++ b/src/build/android/incremental_install/write_installer_json.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/write_installer_json.pydeps build/android/incremental_install/write_installer_json.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+write_installer_json.py
diff --git a/src/build/android/java/templates/BuildConfig.template b/src/build/android/java/templates/BuildConfig.template
new file mode 100644
index 0000000..8953ad5
--- /dev/null
+++ b/src/build/android/java/templates/BuildConfig.template
@@ -0,0 +1,95 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+#define Q(x) #x
+#define QUOTE(x) Q(x)
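+
+// This template is expanded with the C preprocessor at build time; the
+// Q()/QUOTE() helpers turn a -D define's value into a Java string literal
+// where one is needed.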
+
+#if defined(USE_FINAL)
+#define MAYBE_FINAL final
+#define MAYBE_ZERO = 0
+#define MAYBE_FALSE = false
+#else
+#define MAYBE_FINAL
+#define MAYBE_ZERO
+#define MAYBE_FALSE
+#endif
+
+/**
+ *  Build configuration. Generated on a per-target basis.
+ */
+public class BuildConfig {
+
+#if defined(ENABLE_MULTIDEX)
+    public static MAYBE_FINAL boolean IS_MULTIDEX_ENABLED = true;
+#else
+    public static MAYBE_FINAL boolean IS_MULTIDEX_ENABLED MAYBE_FALSE;
+#endif
+
+#if defined(_ENABLE_ASSERTS)
+    public static MAYBE_FINAL boolean ENABLE_ASSERTS = true;
+#else
+    public static MAYBE_FINAL boolean ENABLE_ASSERTS MAYBE_FALSE;
+#endif
+
+#if defined(_IS_UBSAN)
+    public static MAYBE_FINAL boolean IS_UBSAN = true;
+#else
+    public static MAYBE_FINAL boolean IS_UBSAN MAYBE_FALSE;
+#endif
+
+#if defined(_IS_CHROME_BRANDED)
+    public static MAYBE_FINAL boolean IS_CHROME_BRANDED = true;
+#else
+    public static MAYBE_FINAL boolean IS_CHROME_BRANDED MAYBE_FALSE;
+#endif
+
+    // The ID of the android string resource that stores the product version.
+    // This layer of indirection is necessary to make the resource dependency
+    // optional for android_apk targets/base_java (ex. for cronet).
+#if defined(_RESOURCES_VERSION_VARIABLE)
+    public static MAYBE_FINAL int R_STRING_PRODUCT_VERSION = _RESOURCES_VERSION_VARIABLE;
+#else
+    // Default value, do not use.
+    public static MAYBE_FINAL int R_STRING_PRODUCT_VERSION MAYBE_ZERO;
+#endif
+
+    // Minimum SDK Version supported by this apk.
+    // Be cautious when using this value, as older apks can end up installed
+    // on newer Android versions (e.g. when a device goes through a system
+    // upgrade). It is also convenient during development to have all features
+    // available through a single APK.
+    // However, it's pretty safe to assume that a feature specific to KitKat
+    // will never be needed in an APK with MIN_SDK_VERSION = Oreo.
+#if defined(_MIN_SDK_VERSION)
+    public static MAYBE_FINAL int MIN_SDK_VERSION = _MIN_SDK_VERSION;
+#else
+    public static MAYBE_FINAL int MIN_SDK_VERSION = 1;
+#endif
+
+#if defined(_BUNDLES_SUPPORTED)
+    public static MAYBE_FINAL boolean BUNDLES_SUPPORTED = true;
+#else
+    public static MAYBE_FINAL boolean BUNDLES_SUPPORTED MAYBE_FALSE;
+#endif
+
+#if defined(_IS_INCREMENTAL_INSTALL)
+    public static MAYBE_FINAL boolean IS_INCREMENTAL_INSTALL = true;
+#else
+    public static MAYBE_FINAL boolean IS_INCREMENTAL_INSTALL MAYBE_FALSE;
+#endif
+
+#if defined(_IS_CHROMECAST_BRANDING_INTERNAL)
+    public static MAYBE_FINAL boolean IS_CHROMECAST_BRANDING_INTERNAL = true;
+#else
+    public static MAYBE_FINAL boolean IS_CHROMECAST_BRANDING_INTERNAL MAYBE_FALSE;
+#endif
+
+#if defined(_ISOLATED_SPLITS_ENABLED)
+    public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED = true;
+#else
+    public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED MAYBE_FALSE;
+#endif
+}
diff --git a/src/build/android/java/templates/ProductConfig.template b/src/build/android/java/templates/ProductConfig.template
new file mode 100644
index 0000000..4bc0d52
--- /dev/null
+++ b/src/build/android/java/templates/ProductConfig.template
@@ -0,0 +1,34 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package PACKAGE;
+
+#if defined(USE_FINAL)
+#define MAYBE_FINAL final
+#define MAYBE_USE_CHROMIUM_LINKER = USE_CHROMIUM_LINKER_VALUE
+#define MAYBE_USE_MODERN_LINKER = USE_MODERN_LINKER_VALUE
+#define MAYBE_IS_BUNDLE = IS_BUNDLE_VALUE
+#else
+#define MAYBE_FINAL
+#define MAYBE_USE_CHROMIUM_LINKER
+#define MAYBE_USE_MODERN_LINKER
+#define MAYBE_IS_BUNDLE
+#endif
+
+/**
+ *  Product configuration. Generated on a per-target basis.
+ */
+public class ProductConfig {
+    // Sorted list of locales that have an uncompressed .pak within assets.
+    // Stored as an array because AssetManager.list() is slow.
+#if defined(LOCALE_LIST)
+    public static final String[] LOCALES = LOCALE_LIST;
+#else
+    public static final String[] LOCALES = {};
+#endif
+
+    public static MAYBE_FINAL boolean USE_CHROMIUM_LINKER MAYBE_USE_CHROMIUM_LINKER;
+    public static MAYBE_FINAL boolean USE_MODERN_LINKER MAYBE_USE_MODERN_LINKER;
+    public static MAYBE_FINAL boolean IS_BUNDLE MAYBE_IS_BUNDLE;
+}
diff --git a/src/build/android/java/test/DefaultLocaleLintTest.java b/src/build/android/java/test/DefaultLocaleLintTest.java
new file mode 100644
index 0000000..2193429
--- /dev/null
+++ b/src/build/android/java/test/DefaultLocaleLintTest.java
@@ -0,0 +1,17 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package test;
+
+import android.app.Application;
+
+/**
+ * Class which fails 'DefaultLocale' lint check.
+ */
+public class DefaultLocaleLintTest extends Application {
+    public String testTriggerDefaultLocaleCheck(int any) {
+        // String format with an integer requires a Locale since it may be formatted differently.
+        return String.format("Test %d", any);
+    }
+}
diff --git a/src/build/android/java/test/NewApiLintTest.java b/src/build/android/java/test/NewApiLintTest.java
new file mode 100644
index 0000000..6c68dd8
--- /dev/null
+++ b/src/build/android/java/test/NewApiLintTest.java
@@ -0,0 +1,17 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package test;
+
+import android.app.Application;
+
+/**
+ * Class which fails 'NewAPI' lint check.
+ */
+public class NewApiLintTest extends Application {
+    public String testTriggerNewApiCheck() {
+        // This was added in API level 30.
+        return getApplicationContext().getAttributionTag();
+    }
+}
diff --git a/src/build/android/lighttpd_server.py b/src/build/android/lighttpd_server.py
new file mode 100755
index 0000000..42fbcdb
--- /dev/null
+++ b/src/build/android/lighttpd_server.py
@@ -0,0 +1,260 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+from __future__ import print_function
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  @staticmethod
+  def _GetRandomPort():
+    # The port range for the test server is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      # pylint: disable=no-member
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or 'in use' not in server_error:
+        print('Client error:', client_error)
+        print('Server error:', server_error)
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably too quick connecting: try again
+      # Check for server startup error messages
+      # pylint: disable=no-member
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0) # pylint: disable=no-member
+      elif ix == 1:  # EOF -- server has quit so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  @staticmethod
+  def _KillProcessListeningOnPort(port):
+    """Checks if there is a process listening on port number |port| and
+    terminates it if found.
+
+    Args:
+      port: Port number to check.
+    """
+    if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+      # Give the process some time to terminate and check that it is gone.
+      time.sleep(2)
+      assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+          'Unable to kill process listening on port %d.' % port
+
+  @staticmethod
+  def _GetDefaultBaseConfig():
+    return """server.tag                  = "%(server_tag)s"
+server.modules              = ( "mod_access",
+                                "mod_accesslog",
+                                "mod_alias",
+                                "mod_cgi",
+                                "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names            = ( "index.php", "index.pl", "index.cgi",
+                                "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign             = (
+  ".gif"          =>      "image/gif",
+  ".jpg"          =>      "image/jpeg",
+  ".jpeg"         =>      "image/jpeg",
+  ".png"          =>      "image/png",
+  ".svg"          =>      "image/svg+xml",
+  ".css"          =>      "text/css",
+  ".html"         =>      "text/html",
+  ".htm"          =>      "text/html",
+  ".xhtml"        =>      "application/xhtml+xml",
+  ".xhtmlmp"      =>      "application/vnd.wap.xhtml+xml",
+  ".js"           =>      "application/x-javascript",
+  ".log"          =>      "text/plain",
+  ".conf"         =>      "text/plain",
+  ".text"         =>      "text/plain",
+  ".txt"          =>      "text/plain",
+  ".dtd"          =>      "text/xml",
+  ".xml"          =>      "text/xml",
+  ".manifest"     =>      "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr          = "enable"
+
+##
+# which extensions should not be handled via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate        = "enable"
+#dir-listing.encoding       = "iso-8859-2"
+#dir-listing.external-css   = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header   = "enable"
+#debug.log-response-header  = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found   = "enable"
+
+#### SSL engine
+#ssl.engine                 = "enable"
+#ssl.pemfile                = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi"  => "/usr/bin/env",
+               ".pl"   => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php"  => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print('Server exit code:', server.process.exitstatus)
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/list_class_verification_failures.py b/src/build/android/list_class_verification_failures.py
new file mode 100755
index 0000000..508e831
--- /dev/null
+++ b/src/build/android/list_class_verification_failures.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A helper script to list class verification errors.
+
+This is a wrapper around the device's oatdump executable, parsing desired output
+and accommodating API-level-specific details, such as file paths.
+"""
+
+from __future__ import print_function
+
+import argparse
+import logging
+import os
+import re
+
+import devil_chromium
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+from devil.android.tools import script_common
+from devil.utils import logging_common
+from py_utils import tempfile_ext
+
+STATUSES = [
+    'NotReady',
+    'RetryVerificationAtRuntime',
+    'Verified',
+    'Initialized',
+    'SuperclassValidated',
+]
+
+
+def DetermineDeviceToUse(devices):
+  """Like DeviceUtils.HealthyDevices(), but only allow a single device.
+
+  Args:
+    devices: A (possibly empty) list of serial numbers, such as from the
+        --device flag.
+  Returns:
+    A single device_utils.DeviceUtils instance.
+  Raises:
+    device_errors.NoDevicesError: Raised when no non-denylisted devices exist.
+    device_errors.MultipleDevicesError: Raised when multiple devices exist,
+        but |devices| does not distinguish which to use.
+  """
+  if not devices:
+    # If the user did not specify which device, we let HealthyDevices raise
+    # MultipleDevicesError.
+    devices = None
+  usable_devices = device_utils.DeviceUtils.HealthyDevices(device_arg=devices)
+  # If the user specified more than one device, we still only want to support a
+  # single device, so we explicitly raise MultipleDevicesError.
+  if len(usable_devices) > 1:
+    raise device_errors.MultipleDevicesError(usable_devices)
+  return usable_devices[0]
+
+
+class DeviceOSError(Exception):
+  """Raised when a file is missing from the device, or something similar."""
+  pass
+
+
+class UnsupportedDeviceError(Exception):
+  """Raised when the device is not supported by this script."""
+  pass
+
+
+def _GetFormattedArch(device):
+  abi = device.product_cpu_abi
+  # Some architectures don't map 1:1 with the folder names.
+  return {abis.ARM_64: 'arm64', abis.ARM: 'arm'}.get(abi, abi)
+
+
+def PathToDexForPlatformVersion(device, package_name):
+  """Gets the full path to the dex file on the device."""
+  sdk_level = device.build_version_sdk
+  paths_to_apk = device.GetApplicationPaths(package_name)
+  if not paths_to_apk:
+    raise DeviceOSError(
+        'Could not find data directory for {}. Is it installed?'.format(
+            package_name))
+  if len(paths_to_apk) != 1:
+    raise DeviceOSError(
+        'Expected exactly one path for {} but found {}'.format(
+            package_name,
+            paths_to_apk))
+  path_to_apk = paths_to_apk[0]
+
+  if version_codes.LOLLIPOP <= sdk_level <= version_codes.LOLLIPOP_MR1:
+    # Of the form "com.example.foo-\d", where \d is some digit (usually 1 or 2)
+    package_with_suffix = os.path.basename(os.path.dirname(path_to_apk))
+    arch = _GetFormattedArch(device)
+    dalvik_prefix = '/data/dalvik-cache/{arch}'.format(arch=arch)
+    odex_file = '{prefix}/data@app@{package}@base.apk@classes.dex'.format(
+        prefix=dalvik_prefix,
+        package=package_with_suffix)
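+    # e.g. (illustrative):
+    #   /data/dalvik-cache/arm/data@app@com.example.foo-1@base.apk@classes.dex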
+  elif sdk_level >= version_codes.MARSHMALLOW:
+    arch = _GetFormattedArch(device)
+    odex_file = '{data_dir}/oat/{arch}/base.odex'.format(
+        data_dir=os.path.dirname(path_to_apk), arch=arch)
+  else:
+    raise UnsupportedDeviceError('Unsupported API level: {}'.format(sdk_level))
+
+  odex_file_exists = device.FileExists(odex_file)
+  if odex_file_exists:
+    return odex_file
+  elif sdk_level >= version_codes.PIE:
+    raise DeviceOSError(
+        'Unable to find odex file: you must run dex2oat on debuggable apps '
+        'on >= P after installation.')
+  raise DeviceOSError('Unable to find odex file ' + odex_file)
+
+
+def _AdbOatDumpForPackage(device, package_name, out_file):
+  """Runs oatdump on the device."""
+  # Get the path to the odex file.
+  odex_file = PathToDexForPlatformVersion(device, package_name)
+  device.RunShellCommand(
+      ['oatdump', '--oat-file=' + odex_file, '--output=' + out_file],
+      timeout=420,
+      shell=True,
+      check_return=True)
+
+
+class JavaClass(object):
+  """This represents a Java Class and its ART Class Verification status."""
+
+  def __init__(self, name, verification_status):
+    self.name = name
+    self.verification_status = verification_status
+
+
+def _ParseMappingFile(proguard_map_file):
+  """Creates a map of obfuscated names to deobfuscated names."""
+  mappings = {}
+  with open(proguard_map_file, 'r') as f:
+    pattern = re.compile(r'^(\S+) -> (\S+):')
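+    # A mapping line looks like (illustrative):
+    #   org.chromium.Foo -> a.b:
+    # which yields mappings['a.b'] = 'org.chromium.Foo'.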
+    for line in f:
+      m = pattern.match(line)
+      if m is not None:
+        deobfuscated_name = m.group(1)
+        obfuscated_name = m.group(2)
+        mappings[obfuscated_name] = deobfuscated_name
+  return mappings
+
+
+def _DeobfuscateJavaClassName(dex_code_name, proguard_mappings):
+  return proguard_mappings.get(dex_code_name, dex_code_name)
+
+
+def FormatJavaClassName(dex_code_name, proguard_mappings):
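+  # Dex class descriptors use '/' separators, e.g. "a/b/Foo" -> "a.b.Foo".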
+  obfuscated_name = dex_code_name.replace('/', '.')
+  if proguard_mappings is not None:
+    return _DeobfuscateJavaClassName(obfuscated_name, proguard_mappings)
+  else:
+    return obfuscated_name
+
+
+def ListClassesAndVerificationStatus(oatdump_output, proguard_mappings):
+  """Lists all Java classes in the dex along with verification status."""
+  java_classes = []
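+  # Expected oatdump line shape (illustrative, mirroring the unit tests):
+  #   6: La/b/JavaClass1; (offset=0xac) (type_idx=6) (StatusVerified) ...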
+  pattern = re.compile(r'\d+: L([^;]+).*\(type_idx=[^(]+\((\w+)\).*')
+  for line in oatdump_output:
+    m = pattern.match(line)
+    if m is not None:
+      name = FormatJavaClassName(m.group(1), proguard_mappings)
+      # Some platform levels prefix this with "Status" while other levels do
+      # not. Strip this for consistency.
+      verification_status = m.group(2).replace('Status', '')
+      java_classes.append(JavaClass(name, verification_status))
+  return java_classes
+
+
+def _PrintVerificationResults(target_status, java_classes, show_summary):
+  """Prints results for user output."""
+  # Sort to keep output consistent between runs.
+  java_classes.sort(key=lambda c: c.name)
+  d = {}
+  for status in STATUSES:
+    d[status] = 0
+
+  for java_class in java_classes:
+    if java_class.verification_status == target_status:
+      print(java_class.name)
+    if java_class.verification_status not in d:
+      raise RuntimeError('Unexpected status: {0}'.format(
+          java_class.verification_status))
+    else:
+      d[java_class.verification_status] += 1
+
+  if show_summary:
+    for status in d:
+      count = d[status]
+      print('Total {status} classes: {num}'.format(
+          status=status, num=count))
+    print('Total number of classes: {num}'.format(
+        num=len(java_classes)))
+
+
+def RealMain(mapping, device_arg, package, status, hide_summary, workdir):
+  if mapping is None:
+    logging.warning(
+        'Skipping deobfuscation because no map file was provided.')
+  device = DetermineDeviceToUse(device_arg)
+  device.EnableRoot()
+  with device_temp_file.DeviceTempFile(
+      device.adb) as file_on_device:
+    _AdbOatDumpForPackage(device, package, file_on_device.name)
+    file_on_host = os.path.join(workdir, 'out.dump')
+    device.PullFile(file_on_device.name, file_on_host, timeout=220)
+  proguard_mappings = (_ParseMappingFile(mapping) if mapping else None)
+  with open(file_on_host, 'r') as f:
+    java_classes = ListClassesAndVerificationStatus(f, proguard_mappings)
+    _PrintVerificationResults(status, java_classes, not hide_summary)
+
+
+def main():
+  parser = argparse.ArgumentParser(description="""
+List Java classes in an APK which fail ART class verification.
+""")
+  parser.add_argument(
+      '--package',
+      '-P',
+      type=str,
+      default=None,
+      required=True,
+      help='Specify the full application package name')
+  parser.add_argument(
+      '--mapping',
+      '-m',
+      type=os.path.realpath,
+      default=None,
+      help='Mapping file for the desired APK to deobfuscate class names')
+  parser.add_argument(
+      '--hide-summary',
+      default=False,
+      action='store_true',
+      help='Do not output the total number of classes in each Status.')
+  parser.add_argument(
+      '--status',
+      type=str,
+      default='RetryVerificationAtRuntime',
+      choices=STATUSES,
+      help='Which category of classes to list at the end of the script')
+  parser.add_argument(
+      '--workdir',
+      '-w',
+      type=os.path.realpath,
+      default=None,
+      help=('Work directory for oatdump output (default = temporary '
+            'directory). If specified, this will not be cleaned up at the end '
+            'of the script (useful if you want to inspect oatdump output '
+            'manually)'))
+
+  script_common.AddEnvironmentArguments(parser)
+  script_common.AddDeviceArguments(parser)
+  logging_common.AddLoggingArguments(parser)
+
+  args = parser.parse_args()
+  devil_chromium.Initialize(adb_path=args.adb_path)
+  logging_common.InitializeLogging(args)
+
+  if args.workdir:
+    if not os.path.isdir(args.workdir):
+      raise RuntimeError('Specified working directory does not exist')
+    RealMain(args.mapping, args.devices, args.package, args.status,
+             args.hide_summary, args.workdir)
+    # Assume the user wants the workdir to persist (useful for debugging).
+    logging.warning('Not cleaning up explicitly-specified workdir: %s',
+                    args.workdir)
+  else:
+    with tempfile_ext.NamedTemporaryDirectory() as workdir:
+      RealMain(args.mapping, args.devices, args.package, args.status,
+               args.hide_summary, workdir)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/list_class_verification_failures_test.py b/src/build/android/list_class_verification_failures_test.py
new file mode 100644
index 0000000..4248064
--- /dev/null
+++ b/src/build/android/list_class_verification_failures_test.py
@@ -0,0 +1,236 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import list_class_verification_failures as list_verification
+
+import devil_chromium  # pylint: disable=unused-import
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+
+import mock  # pylint: disable=import-error
+
+
+def _CreateOdexLine(java_class_name, type_idx, verification_status):
+  """Create a rough approximation of a line of oatdump output."""
+  return ('{type_idx}: L{java_class}; (offset=0xac) (type_idx={type_idx}) '
+          '({verification}) '
+          '(OatClassNoneCompiled)'.format(type_idx=type_idx,
+                                          java_class=java_class_name,
+                                          verification=verification_status))
+
+
+def _ClassForName(name, classes):
+  return next(c for c in classes if c.name == name)
+
+
+class _DetermineDeviceToUseTest(unittest.TestCase):
+
+  def testDetermineDeviceToUse_emptyListWithOneAttachedDevice(self):
+    fake_attached_devices = ['123']
+    user_specified_devices = []
+    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+        return_value=fake_attached_devices)
+    result = list_verification.DetermineDeviceToUse(user_specified_devices)
+    self.assertEqual(result, fake_attached_devices[0])
+    # pylint: disable=no-member
+    device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+    # pylint: enable=no-member
+
+  def testDetermineDeviceToUse_emptyListWithNoAttachedDevices(self):
+    user_specified_devices = []
+    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+        side_effect=device_errors.NoDevicesError())
+    with self.assertRaises(device_errors.NoDevicesError) as _:
+      list_verification.DetermineDeviceToUse(user_specified_devices)
+    # pylint: disable=no-member
+    device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+    # pylint: enable=no-member
+
+  def testDetermineDeviceToUse_oneElementListWithOneAttachedDevice(self):
+    user_specified_devices = ['123']
+    fake_attached_devices = ['123']
+    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+        return_value=fake_attached_devices)
+    result = list_verification.DetermineDeviceToUse(user_specified_devices)
+    self.assertEqual(result, fake_attached_devices[0])
+    # pylint: disable=no-member
+    device_utils.DeviceUtils.HealthyDevices.assert_called_with(
+        device_arg=user_specified_devices)
+    # pylint: enable=no-member
+
+
+class _ListClassVerificationFailuresTest(unittest.TestCase):
+
+  def testPathToDexForPlatformVersion_noPaths(self):
+    sdk_int = version_codes.LOLLIPOP
+    paths_to_apk = []
+    package_name = 'package.name'
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+    with self.assertRaises(list_verification.DeviceOSError) as cm:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+    message = str(cm.exception)
+    self.assertIn('Could not find data directory', message)
+
+  def testPathToDexForPlatformVersion_multiplePaths(self):
+    sdk_int = version_codes.LOLLIPOP
+    paths_to_apk = ['/first/path', '/second/path']
+    package_name = 'package.name'
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+    with self.assertRaises(list_verification.DeviceOSError) as cm:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+    message = str(cm.exception)
+    self.assertIn('Expected exactly one path for', message)
+
+  def testPathToDexForPlatformVersion_dalvikApiLevel(self):
+    sdk_int = version_codes.KITKAT
+    paths_to_apk = ['/some/path']
+    package_name = 'package.name'
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+    with self.assertRaises(list_verification.UnsupportedDeviceError) as _:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+
+  def testPathToDexForPlatformVersion_lollipopArm(self):
+    sdk_int = version_codes.LOLLIPOP
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = 'arm'
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=True)
+
+    odex_file = list_verification.PathToDexForPlatformVersion(device,
+                                                              package_name)
+    self.assertEqual(odex_file,
+                     ('/data/dalvik-cache/arm/data@app'
+                      '@package.name-1@base.apk@classes.dex'))
+
+  def testPathToDexForPlatformVersion_marshmallowArm(self):
+    sdk_int = version_codes.MARSHMALLOW
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = 'arm'
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=True)
+
+    odex_file = list_verification.PathToDexForPlatformVersion(device,
+                                                              package_name)
+    self.assertEqual(odex_file,
+                     '/some/path/package.name-1/oat/arm/base.odex')
+
+  def testPathToDexForPlatformVersion_marshmallowArm64(self):
+    sdk_int = version_codes.MARSHMALLOW
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=True)
+
+    odex_file = list_verification.PathToDexForPlatformVersion(device,
+                                                              package_name)
+    self.assertEqual(odex_file,
+                     '/some/path/package.name-1/oat/arm64/base.odex')
+
+  def testPathToDexForPlatformVersion_pieNoOdexFile(self):
+    sdk_int = version_codes.PIE
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=False)
+
+    with self.assertRaises(list_verification.DeviceOSError) as cm:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+    message = str(cm.exception)
+    self.assertIn('you must run dex2oat on debuggable apps on >= P', message)
+
+  def testPathToDexForPlatformVersion_lowerApiLevelNoOdexFile(self):
+    sdk_int = version_codes.MARSHMALLOW
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=False)
+
+    with self.assertRaises(list_verification.DeviceOSError) as _:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+
+  def testListClasses_noProguardMap(self):
+    oatdump_output = [
+        _CreateOdexLine('a.b.JavaClass1', 6, 'StatusVerified'),
+        _CreateOdexLine('a.b.JavaClass2', 7,
+                        'StatusRetryVerificationAtRuntime'),
+    ]
+
+    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+                                                                 None)
+    self.assertEqual(2, len(classes))
+    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+    self.assertEqual(java_class_1.verification_status, 'Verified')
+    self.assertEqual(java_class_2.verification_status,
+                     'RetryVerificationAtRuntime')
+
+  def testListClasses_proguardMap(self):
+    oatdump_output = [
+        _CreateOdexLine('a.b.ObfuscatedJavaClass1', 6, 'StatusVerified'),
+        _CreateOdexLine('a.b.ObfuscatedJavaClass2', 7,
+                        'StatusRetryVerificationAtRuntime'),
+    ]
+
+    mapping = {
+        'a.b.ObfuscatedJavaClass1': 'a.b.JavaClass1',
+        'a.b.ObfuscatedJavaClass2': 'a.b.JavaClass2',
+    }
+    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+                                                                 mapping)
+    self.assertEqual(2, len(classes))
+    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+    self.assertEqual(java_class_1.verification_status, 'Verified')
+    self.assertEqual(java_class_2.verification_status,
+                     'RetryVerificationAtRuntime')
+
+  def testListClasses_noStatusPrefix(self):
+    oatdump_output = [
+        _CreateOdexLine('a.b.JavaClass1', 6, 'Verified'),
+        _CreateOdexLine('a.b.JavaClass2', 7, 'RetryVerificationAtRuntime'),
+    ]
+
+    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+                                                                 None)
+    self.assertEqual(2, len(classes))
+    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+    self.assertEqual(java_class_1.verification_status, 'Verified')
+    self.assertEqual(java_class_2.verification_status,
+                     'RetryVerificationAtRuntime')
+
+if __name__ == '__main__':
+  # Suppress logging messages.
+  unittest.main(buffer=True)
diff --git a/src/build/android/list_java_targets.py b/src/build/android/list_java_targets.py
new file mode 100755
index 0000000..d0689a6
--- /dev/null
+++ b/src/build/android/list_java_targets.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Lint as: python3
+"""Prints out available java targets.
+
+Examples:
+# List GN target for bundles:
+build/android/list_java_targets.py -C out/Default --type android_app_bundle \
+--gn-labels
+
+# List all android targets with types:
+build/android/list_java_targets.py -C out/Default --print-types
+
+# Build all apk targets:
+build/android/list_java_targets.py -C out/Default --type android_apk | xargs \
+autoninja -C out/Default
+
+# Show how many of each target type exist:
+build/android/list_java_targets.py -C out/Default --stats
+
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import subprocess
+import sys
+
+_SRC_ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..',
+                                          '..'))
+sys.path.append(os.path.join(_SRC_ROOT, 'build', 'android'))
+from pylib import constants
+
+_VALID_TYPES = (
+    'android_apk',
+    'android_app_bundle',
+    'android_app_bundle_module',
+    'android_assets',
+    'android_resources',
+    'dist_aar',
+    'dist_jar',
+    'group',
+    'java_annotation_processor',
+    'java_binary',
+    'java_library',
+    'junit_binary',
+    'system_java_library',
+)
+
+
+def _run_ninja(output_dir, args):
+  cmd = [
+      'autoninja',
+      '-C',
+      output_dir,
+  ]
+  cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.run(cmd, check=True, stdout=sys.stderr)
+
+
+def _query_for_build_config_targets(output_dir):
+  # Query ninja rather than GN since it's faster.
+  cmd = ['ninja', '-C', output_dir, '-t', 'targets']
+  logging.info('Running: %r', cmd)
+  ninja_output = subprocess.run(cmd,
+                                check=True,
+                                capture_output=True,
+                                encoding='ascii').stdout
+  ret = []
+  SUFFIX = '__build_config_crbug_908819'
+  SUFFIX_LEN = len(SUFFIX)
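+  # Each line of `ninja -t targets` has the form "path:name: rule", e.g.
+  # (illustrative) "base:base_java__build_config_crbug_908819: phony",
+  # which is emitted below as the GN label "//base:base_java".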
+  for line in ninja_output.splitlines():
+    ninja_target = line.rsplit(':', 1)[0]
+    # Ignore root aliases by ensuring a : exists.
+    if ':' in ninja_target and ninja_target.endswith(SUFFIX):
+      ret.append(f'//{ninja_target[:-SUFFIX_LEN]}')
+  return ret
+
+
+class _TargetEntry(object):
+  def __init__(self, gn_target):
+    assert gn_target.startswith('//'), f'{gn_target} does not start with //'
+    assert ':' in gn_target, f'Non-root {gn_target} required'
+    self.gn_target = gn_target
+    self._build_config = None
+
+  @property
+  def ninja_target(self):
+    return self.gn_target[2:]
+
+  @property
+  def ninja_build_config_target(self):
+    return self.ninja_target + '__build_config_crbug_908819'
+
+  @property
+  def build_config_path(self):
+    """Returns the filepath of the project's .build_config."""
+    ninja_target = self.ninja_target
+    # Support targets at the root level. e.g. //:foo
+    if ninja_target[0] == ':':
+      ninja_target = ninja_target[1:]
+    subpath = ninja_target.replace(':', os.path.sep) + '.build_config'
+    return os.path.join(constants.GetOutDirectory(), 'gen', subpath)
+
+  def build_config(self):
+    """Reads and returns the project's .build_config JSON."""
+    if not self._build_config:
+      with open(self.build_config_path) as jsonfile:
+        self._build_config = json.load(jsonfile)
+    return self._build_config
+
+  def get_type(self):
+    """Returns the target type from its .build_config."""
+    return self.build_config()['deps_info']['type']
+
+  def proguard_enabled(self):
+    """Returns whether proguard runs for this target."""
+    # Modules set proguard_enabled, but the proguarding happens only once at the
+    # bundle level.
+    if self.get_type() == 'android_app_bundle_module':
+      return False
+    return self.build_config()['deps_info'].get('proguard_enabled', False)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument('-C',
+                      '--output-directory',
+                      help='If outdir is not provided, will attempt to guess.')
+  parser.add_argument('--gn-labels',
+                      action='store_true',
+                      help='Print GN labels rather than ninja targets')
+  parser.add_argument(
+      '--nested',
+      action='store_true',
+      help='Do not convert nested targets to their top-level equivalents. '
+      'E.g., without this, foo_test__apk -> foo_test')
+  parser.add_argument('--print-types',
+                      action='store_true',
+                      help='Print type of each target')
+  parser.add_argument('--print-build-config-paths',
+                      action='store_true',
+                      help='Print path to the .build_config of each target')
+  parser.add_argument('--build',
+                      action='store_true',
+                      help='Build all .build_config files.')
+  parser.add_argument('--type',
+                      action='append',
+                      help='Restrict to targets of given type',
+                      choices=_VALID_TYPES)
+  parser.add_argument('--stats',
+                      action='store_true',
+                      help='Print counts of each target type.')
+  parser.add_argument('--proguard-enabled',
+                      action='store_true',
+                      help='Restrict to targets that have proguard enabled')
+  parser.add_argument('-v', '--verbose', default=0, action='count')
+  args = parser.parse_args()
+
+  args.build |= bool(args.type or args.proguard_enabled or args.print_types
+                     or args.stats)
+
+  logging.basicConfig(level=logging.WARNING - (10 * args.verbose),
+                      format='%(levelname).1s %(relativeCreated)6d %(message)s')
+
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  constants.CheckOutputDirectory()
+  output_dir = constants.GetOutDirectory()
+
+  # Query ninja for all __build_config_crbug_908819 targets.
+  targets = _query_for_build_config_targets(output_dir)
+  entries = [_TargetEntry(t) for t in targets]
+
+  if args.build:
+    logging.warning('Building %d .build_config files...', len(entries))
+    _run_ninja(output_dir, [e.ninja_build_config_target for e in entries])
+
+  if args.type:
+    entries = [e for e in entries if e.get_type() in args.type]
+
+  if args.proguard_enabled:
+    entries = [e for e in entries if e.proguard_enabled()]
+
+  if args.stats:
+    counts = collections.Counter(e.get_type() for e in entries)
+    for entry_type, count in sorted(counts.items()):
+      print(f'{entry_type}: {count}')
+  else:
+    for e in entries:
+      if args.gn_labels:
+        to_print = e.gn_target
+      else:
+        to_print = e.ninja_target
+
+      # Convert to top-level target
+      if not args.nested:
+        to_print = to_print.replace('__test_apk', '').replace('__apk', '')
+
+      if args.print_types:
+        to_print = f'{to_print}: {e.get_type()}'
+      elif args.print_build_config_paths:
+        to_print = f'{to_print}: {e.build_config_path}'
+
+      print(to_print)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/main_dex_classes.flags b/src/build/android/main_dex_classes.flags
new file mode 100644
index 0000000..31dbdd6
--- /dev/null
+++ b/src/build/android/main_dex_classes.flags
@@ -0,0 +1,52 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Proguard flags for what should be kept in the main dex. Only used
+# during main dex list determination, not during actual proguarding.
+
+-keep @org.chromium.base.annotations.MainDex class * {
+  *;
+}
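+# e.g. (illustrative) a class declared as:
+#   @MainDex
+#   public class Foo { ... }
+# is kept in the main dex along with all of its members.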
+
+-keepclasseswithmembers class * {
+  @org.chromium.base.annotations.MainDex <methods>;
+}
+
+# Assume all IDL-generated classes should be kept. They can't reference other
+# non-framework classes, so fairly low-risk.
+-keepclasseswithmembers class * {
+  public static ** asInterface(android.os.IBinder);
+}
+
+# Required when code coverage is enabled.
+-keep class com.vladium.** {
+  *;
+}
+
+# Renderers / GPU process don't load secondary dex.
+-keep public class * extends org.chromium.base.process_launcher.ChildProcessService {
+  *;
+}
+
+# Used by tests for secondary dex extraction.
+-keep class android.support.v4.content.ContextCompat {
+  *;
+}
+
+# The following are based on $SDK_BUILD_TOOLS/mainDexClasses.rules
+# Ours differ in that:
+# 1. It omits -keeps for application / instrumentation / backupagents (these are
+#    redundant since they are added by aapt's main dex list rules output).
+# 2. Omits keep for Application.attachBaseContext(), which is overly broad.
+# 3. Omits keep for all annotations, which is also overly broad (and pulls in
+#    any class that has an @IntDef).
+
+######## START mainDexClasses.rules ########
+
+# Keep old-fashioned tests in the main dex, or they'll be silently ignored by InstrumentationTestRunner.
+-keep public class * extends android.test.InstrumentationTestCase {
+  <init>();
+}
+
+######## END mainDexClasses.rules ########
diff --git a/src/build/android/method_count.py b/src/build/android/method_count.py
new file mode 100755
index 0000000..a39a390
--- /dev/null
+++ b/src/build/android/method_count.py
@@ -0,0 +1,118 @@
+#! /usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import zipfile
+
+from pylib.dex import dex_parser
+
+
+class DexStatsCollector(object):
+  """Tracks count of method/field/string/type as well as unique methods."""
+
+  def __init__(self):
+    # Signatures of all methods from all seen dex files.
+    self._unique_methods = set()
+    # Map of label -> { metric -> count }.
+    self._counts_by_label = {}
+
+  def _CollectFromDexfile(self, label, dexfile):
+    assert label not in self._counts_by_label, 'exists: ' + label
+    self._counts_by_label[label] = {
+        'fields': dexfile.header.field_ids_size,
+        'methods': dexfile.header.method_ids_size,
+        'strings': dexfile.header.string_ids_size,
+        'types': dexfile.header.type_ids_size,
+    }
+    self._unique_methods.update(dexfile.IterMethodSignatureParts())
+
+  def CollectFromZip(self, label, path):
+    """Add dex stats from an .apk/.jar/.aab/.zip."""
+    with zipfile.ZipFile(path, 'r') as z:
+      for subpath in z.namelist():
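+        # Matches classes.dex, classes2.dex, classes3.dex, ... at any depth.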
+        if not re.match(r'.*classes\d*\.dex$', subpath):
+          continue
+        dexfile = dex_parser.DexFile(bytearray(z.read(subpath)))
+        self._CollectFromDexfile('{}!{}'.format(label, subpath), dexfile)
+
+  def CollectFromDex(self, label, path):
+    """Add dex stats from a .dex file."""
+    with open(path, 'rb') as f:
+      dexfile = dex_parser.DexFile(bytearray(f.read()))
+    self._CollectFromDexfile(label, dexfile)
+
+  def MergeFrom(self, parent_label, other):
+    """Add dex stats from another DexStatsCollector."""
+    # pylint: disable=protected-access
+    for label, other_counts in other._counts_by_label.items():
+      new_label = '{}-{}'.format(parent_label, label)
+      self._counts_by_label[new_label] = other_counts.copy()
+    self._unique_methods.update(other._unique_methods)
+    # pylint: enable=protected-access
+
+  def GetUniqueMethodCount(self):
+    """Returns total number of unique methods across encountered dex files."""
+    return len(self._unique_methods)
+
+  def GetCountsByLabel(self):
+    """Returns dict of label -> {metric -> count}."""
+    return self._counts_by_label
+
+  def GetTotalCounts(self):
+    """Returns dict of {metric -> count}, where |count| is sum(metric)."""
+    ret = {}
+    for metric in ('fields', 'methods', 'strings', 'types'):
+      ret[metric] = sum(x[metric] for x in self._counts_by_label.values())
+    return ret
+
+  def GetDexCacheSize(self, pre_oreo):
+    """Returns number of bytes of dirty RAM is consumed from all dex files."""
+    # Dex Cache was optimized in Android Oreo:
+    # https://source.android.com/devices/tech/dalvik/improvements#dex-cache-removal
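+    # Worked example (illustrative): 10,000 methods, 5,000 fields, 20,000
+    # strings and 1,000 types cost (10000+5000+20000+1000)*4 = 144,000 dirty
+    # bytes pre-Oreo, but only 10000*4 = 40,000 bytes on Oreo+.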
+    if pre_oreo:
+      total = sum(self.GetTotalCounts().values())
+    else:
+      total = sum(c['methods'] for c in self._counts_by_label.values())
+    return total * 4  # 4 bytes per entry.
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('paths', nargs='+')
+  args = parser.parse_args()
+
+  collector = DexStatsCollector()
+  for path in args.paths:
+    if os.path.splitext(path)[1] in ('.zip', '.apk', '.jar', '.aab'):
+      collector.CollectFromZip(path, path)
+    else:
+      collector.CollectFromDex(path, path)
+
+  counts_by_label = collector.GetCountsByLabel()
+  for label, counts in sorted(counts_by_label.items()):
+    print('{}:'.format(label))
+    for metric, count in sorted(counts.items()):
+      print('  {}:'.format(metric), count)
+    print()
+
+  if len(counts_by_label) > 1:
+    print('Totals:')
+    for metric, count in sorted(collector.GetTotalCounts().items()):
+      print('  {}:'.format(metric), count)
+    print()
+
+  print('Unique Methods:', collector.GetUniqueMethodCount())
+  print('DexCache (Pre-Oreo):', collector.GetDexCacheSize(pre_oreo=True),
+        'bytes of dirty memory')
+  print('DexCache (Oreo+):', collector.GetDexCacheSize(pre_oreo=False),
+        'bytes of dirty memory')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/multidex.flags b/src/build/android/multidex.flags
new file mode 100644
index 0000000..e3543c1
--- /dev/null
+++ b/src/build/android/multidex.flags
@@ -0,0 +1,8 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# When multidex is enabled, we need to keep the @MainDex annotation so that
+# it can be used to create the main dex list.
+-keepattributes *Annotations*
+-keep @interface org.chromium.base.annotations.MainDex
diff --git a/src/build/android/native_flags/BUILD.gn b/src/build/android/native_flags/BUILD.gn
new file mode 100644
index 0000000..9c5be70
--- /dev/null
+++ b/src/build/android/native_flags/BUILD.gn
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (current_toolchain == default_toolchain) {
+  import("//build/toolchain/toolchain.gni")
+
+  # A toolchain that will capture compiler and linker arguments to a file.
+  toolchain("flagcapture") {
+    tool("cxx") {
+      cxx = rebase_path("argcapture.py", root_build_dir)
+      command = "$cxx {{output}} {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}"
+      outputs = [ "{{root_out_dir}}/{{label_name}}.txt" ]
+    }
+    tool("solink") {
+      solink = rebase_path("argcapture.py", root_build_dir)
+      command = "$solink {{output}} {{ldflags}}"
+      outputs = [ "{{root_out_dir}}/{{label_name}}.txt" ]
+    }
+    tool("alink") {
+      command = "this-should-never-run"
+      outputs = [ "this-will-never-exist" ]
+    }
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+    }
+  }
+} else if (current_toolchain == "//build/android/native_flags:flagcapture") {
+  # This will record flags from all default configs of the default toolchain.
+  source_set("default_ccflags") {
+    sources = [ "empty.cc" ]
+  }
+  shared_library("default_ldflags") {
+    no_default_deps = true
+  }
+}
diff --git a/src/build/android/native_flags/argcapture.py b/src/build/android/native_flags/argcapture.py
new file mode 100755
index 0000000..159b03a
--- /dev/null
+++ b/src/build/android/native_flags/argcapture.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes arguments to a file."""
+
+import sys
+
+
+def main():
+  with open(sys.argv[1], 'w') as f:
+    f.write('\n'.join(sys.argv[2:]))
+    f.write('\n')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/native_flags/empty.cc b/src/build/android/native_flags/empty.cc
new file mode 100644
index 0000000..94aac14
--- /dev/null
+++ b/src/build/android/native_flags/empty.cc
@@ -0,0 +1,5 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file just needs to exist to appease GN.
diff --git a/src/build/android/provision_devices.py b/src/build/android/provision_devices.py
new file mode 100755
index 0000000..5fb4d93
--- /dev/null
+++ b/src/build/android/provision_devices.py
@@ -0,0 +1,563 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provisions Android devices with settings required for bots.
+
+Usage:
+  ./provision_devices.py [-d <device serial number>]
+"""
+
+import argparse
+import datetime
+import json
+import logging
+import os
+import posixpath
+import re
+import subprocess
+import sys
+import time
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See crbug.com/584730 and https://bugs.python.org/issue7980.
+import _strptime  # pylint: disable=unused-import
+
+import devil_chromium
+from devil.android import battery_utils
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.sdk import keyevent
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+from devil.utils import run_tests_helper
+from devil.utils import timeout_retry
+from pylib import constants
+from pylib import device_settings
+from pylib.constants import host_paths
+
+_SYSTEM_WEBVIEW_PATHS = ['/system/app/webview', '/system/app/WebViewGoogle']
+_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*')
+_TOMBSTONE_REGEX = re.compile('tombstone.*')
+
+
+class _DEFAULT_TIMEOUTS(object):
+  # L can take a while to reboot after a wipe.
+  LOLLIPOP = 600
+  PRE_LOLLIPOP = 180
+
+  HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
+
+
+class _PHASES(object):
+  WIPE = 'wipe'
+  PROPERTIES = 'properties'
+  FINISH = 'finish'
+
+  ALL = [WIPE, PROPERTIES, FINISH]
+
+
+def ProvisionDevices(args):
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+  devices = [
+      d for d in device_utils.DeviceUtils.HealthyDevices(denylist)
+      if not args.emulators or d.adb.is_emulator
+  ]
+  if args.device:
+    devices = [d for d in devices if d == args.device]
+  if not devices:
+    raise device_errors.DeviceUnreachableError(args.device)
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  if args.emulators:
+    parallel_devices.pMap(SetProperties, args)
+  else:
+    parallel_devices.pMap(ProvisionDevice, denylist, args)
+  if args.auto_reconnect:
+    _LaunchHostHeartbeat()
+  denylisted_devices = denylist.Read() if denylist else []
+  if args.output_device_denylist:
+    with open(args.output_device_denylist, 'w') as f:
+      json.dump(denylisted_devices, f)
+  if all(d in denylisted_devices for d in devices):
+    raise device_errors.NoDevicesError
+  return 0
+
+
+def ProvisionDevice(device, denylist, options):
+  def should_run_phase(phase_name):
+    return not options.phases or phase_name in options.phases
+
+  def run_phase(phase_func, reboot_timeout, reboot=True):
+    try:
+      device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
+    except device_errors.CommandTimeoutError:
+      logging.error('Device did not finish booting. Will try to reboot.')
+      device.Reboot(timeout=reboot_timeout)
+    phase_func(device, options)
+    if reboot:
+      device.Reboot(False, retries=0)
+      device.adb.WaitForDevice()
+
+  try:
+    if options.reboot_timeout:
+      reboot_timeout = options.reboot_timeout
+    elif device.build_version_sdk >= version_codes.LOLLIPOP:
+      reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
+    else:
+      reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
+
+    if should_run_phase(_PHASES.WIPE):
+      if (options.chrome_specific_wipe or device.IsUserBuild() or
+          device.build_version_sdk >= version_codes.MARSHMALLOW):
+        run_phase(WipeChromeData, reboot_timeout)
+      else:
+        run_phase(WipeDevice, reboot_timeout)
+
+    if should_run_phase(_PHASES.PROPERTIES):
+      run_phase(SetProperties, reboot_timeout)
+
+    if should_run_phase(_PHASES.FINISH):
+      run_phase(FinishProvisioning, reboot_timeout, reboot=False)
+
+    if options.chrome_specific_wipe:
+      package = "com.google.android.gms"
+      version_name = device.GetApplicationVersion(package)
+      logging.info("Version name for %s is %s", package, version_name)
+
+    CheckExternalStorage(device)
+
+  except device_errors.CommandTimeoutError:
+    logging.exception('Timed out waiting for device %s. Adding to denylist.',
+                      str(device))
+    if denylist:
+      denylist.Extend([str(device)], reason='provision_timeout')
+
+  except (device_errors.CommandFailedError,
+          device_errors.DeviceUnreachableError):
+    logging.exception('Failed to provision device %s. Adding to denylist.',
+                      str(device))
+    if denylist:
+      denylist.Extend([str(device)], reason='provision_failure')
+
+
+def CheckExternalStorage(device):
+  """Checks that storage is writable and if not makes it writable.
+
+  Arguments:
+    device: The device to check.
+  """
+  try:
+    with device_temp_file.DeviceTempFile(
+        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
+      device.WriteFile(f.name, 'test')
+  except device_errors.CommandFailedError:
+    logging.info('External storage not writable. Remounting / as RW')
+    device.RunShellCommand(['mount', '-o', 'remount,rw', '/'],
+                           check_return=True, as_root=True)
+    device.EnableRoot()
+    with device_temp_file.DeviceTempFile(
+        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
+      device.WriteFile(f.name, 'test')
+
+def WipeChromeData(device, options):
+  """Wipes chrome specific data from device
+
+  (1) uninstall any app whose name matches *chrom*, except
+      com.android.chrome, which is the chrome stable package. Doing so also
+      removes the corresponding dirs under /data/data/ and /data/app/
+  (2) remove any dir under /data/app-lib/ whose name matches *chrom*
+  (3) remove any files under /data/tombstones/ whose name matches "tombstone*"
+  (4) remove /data/local.prop if there is any
+  (5) remove /data/local/chrome-command-line if there is any
+  (6) remove anything under /data/local/.config/ if the dir exists
+      (this is telemetry related)
+  (7) remove anything under /data/local/tmp/
+
+  Arguments:
+    device: the device to wipe
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    if device.IsUserBuild():
+      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+                        constants.PACKAGE_INFO['chrome_stable'].package)
+      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+                             check_return=True)
+      device.RunShellCommand('rm -rf /data/local/tmp/*', check_return=True)
+    else:
+      device.EnableRoot()
+      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+                        constants.PACKAGE_INFO['chrome_stable'].package)
+      _WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX)
+      _WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX)
+
+      _WipeFileOrDir(device, '/data/local.prop')
+      _WipeFileOrDir(device, '/data/local/chrome-command-line')
+      _WipeFileOrDir(device, '/data/local/.config/')
+      _WipeFileOrDir(device, '/data/local/tmp/')
+      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+                             check_return=True)
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def WipeDevice(device, options):
+  """Wipes data from device, keeping only the adb_keys for authorization.
+
+  After wiping data on a device that has been authorized, adb can still
+  communicate with the device, but after reboot the device will need to be
+  re-authorized because the adb keys file is stored in /data/misc/adb/.
+  Thus, the adb_keys file is rewritten so the device does not need to be
+  re-authorized.
+
+  Arguments:
+    device: the device to wipe
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    device.EnableRoot()
+    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
+    if device_authorized:
+      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
+                                 as_root=True).splitlines()
+    device.RunShellCommand(['wipe', 'data'],
+                           as_root=True, check_return=True)
+    device.adb.WaitForDevice()
+
+    if device_authorized:
+      adb_keys_set = set(adb_keys)
+      for adb_key_file in options.adb_key_files or []:
+        try:
+          with open(adb_key_file, 'r') as f:
+            adb_public_keys = f.readlines()
+          adb_keys_set.update(adb_public_keys)
+        except IOError:
+          logging.warning('Unable to find adb keys file %s.', adb_key_file)
+      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def _WriteAdbKeysFile(device, adb_keys_string):
+  dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
+  device.RunShellCommand(['mkdir', '-p', dir_path],
+                         as_root=True, check_return=True)
+  device.RunShellCommand(['restorecon', dir_path],
+                         as_root=True, check_return=True)
+  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
+  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
+                         as_root=True, check_return=True)
+
+
+def SetProperties(device, options):
+  try:
+    device.EnableRoot()
+  except device_errors.CommandFailedError as e:
+    logging.warning(str(e))
+
+  if not device.IsUserBuild():
+    _ConfigureLocalProperties(device, options.enable_java_debug)
+  else:
+    logging.warning('Cannot configure properties in user builds.')
+  device_settings.ConfigureContentSettings(
+      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+  if options.disable_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_LOCATION_SETTINGS)
+
+  if options.disable_mock_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)
+
+  device_settings.SetLockScreenSettings(device)
+  if options.disable_network:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.NETWORK_DISABLED_SETTINGS)
+    if device.build_version_sdk >= version_codes.MARSHMALLOW:
+      # Ensure that NFC is also switched off.
+      device.RunShellCommand(['svc', 'nfc', 'disable'],
+                             as_root=True, check_return=True)
+
+  if options.disable_system_chrome:
+    # The system chrome version on the device interferes with some tests.
+    device.RunShellCommand(['pm', 'disable', 'com.android.chrome'],
+                           check_return=True)
+
+  if options.remove_system_webview:
+    if any(device.PathExists(p) for p in _SYSTEM_WEBVIEW_PATHS):
+      logging.info('System WebView exists and needs to be removed')
+      if device.HasRoot():
+        # Disable Marshmallow's Verity security feature.
+        if device.build_version_sdk >= version_codes.MARSHMALLOW:
+          device.adb.DisableVerity()
+          device.Reboot()
+          device.WaitUntilFullyBooted()
+          device.EnableRoot()
+
+        # This is required, e.g., to replace the system webview on a device.
+        device.adb.Remount()
+        device.RunShellCommand(['stop'], check_return=True)
+        device.RunShellCommand(['rm', '-rf'] + _SYSTEM_WEBVIEW_PATHS,
+                               check_return=True)
+        device.RunShellCommand(['start'], check_return=True)
+      else:
+        logging.warning('Cannot remove system webview from a non-rooted device')
+    else:
+      logging.info('System WebView already removed')
+
+  # Some device types can momentarily disappear after setting properties.
+  device.adb.WaitForDevice()
+
+
+def _ConfigureLocalProperties(device, java_debug=True):
+  """Set standard readonly testing device properties prior to reboot."""
+  local_props = [
+      'persist.sys.usb.config=adb',
+      'ro.monkey=1',
+      'ro.test_harness=1',
+      'ro.audio.silent=1',
+      'ro.setupwizard.mode=DISABLED',
+      ]
+  if java_debug:
+    local_props.append(
+        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
+    local_props.append('debug.checkjni=1')
+  try:
+    device.WriteFile(
+        device.LOCAL_PROPERTIES_PATH,
+        '\n'.join(local_props), as_root=True)
+    # Android will not respect the local props file if it is world writable.
+    device.RunShellCommand(
+        ['chmod', '644', device.LOCAL_PROPERTIES_PATH],
+        as_root=True, check_return=True)
+  except device_errors.CommandFailedError:
+    logging.exception('Failed to configure local properties.')
+
+
+def FinishProvisioning(device, options):
+  # The lockscreen can't be disabled on user builds, so send a keyevent
+  # to unlock it.
+  if device.IsUserBuild():
+    device.SendKeyEvent(keyevent.KEYCODE_MENU)
+
+  if options.min_battery_level is not None:
+    battery = battery_utils.BatteryUtils(device)
+    try:
+      battery.ChargeDeviceToLevel(options.min_battery_level)
+    except device_errors.DeviceChargingError:
+      device.Reboot()
+      battery.ChargeDeviceToLevel(options.min_battery_level)
+
+  if options.max_battery_temp is not None:
+    try:
+      battery = battery_utils.BatteryUtils(device)
+      battery.LetBatteryCoolToTemperature(options.max_battery_temp)
+    except device_errors.CommandFailedError:
+      logging.exception('Unable to let battery cool to specified temperature.')
+
+  def _set_and_verify_date():
+    if device.build_version_sdk >= version_codes.MARSHMALLOW:
+      date_format = '%m%d%H%M%Y.%S'
+      set_date_command = ['date', '-u']
+      get_date_command = ['date', '-u']
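+      # e.g. (illustrative): `date -u 060912302016.00` sets
+      # June 9, 2016 12:30:00 UTC on M+ devices.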
+    else:
+      date_format = '%Y%m%d.%H%M%S'
+      set_date_command = ['date', '-s']
+      get_date_command = ['date']
+
+    # TODO(jbudorick): This is wrong on pre-M devices -- get/set are
+    # dealing in local time, but we're setting based on GMT.
+    strgmtime = time.strftime(date_format, time.gmtime())
+    set_date_command.append(strgmtime)
+    device.RunShellCommand(set_date_command, as_root=True, check_return=True)
+
+    get_date_command.append('+"%Y%m%d.%H%M%S"')
+    device_time = device.RunShellCommand(
+        get_date_command, as_root=True, single_line=True).replace('"', '')
+    device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S")
+    correct_time = datetime.datetime.strptime(strgmtime, date_format)
+    tdelta = (correct_time - device_time).seconds
+    if tdelta <= 1:
+      logging.info('Date/time successfully set on %s', device)
+      return True
+    else:
+      logging.error('Date mismatch. Device: %s Correct: %s',
+                    device_time.isoformat(), correct_time.isoformat())
+      return False
+
+  # Sometimes the date is not set correctly on the devices. Retry on failure.
+  if device.IsUserBuild():
+    # TODO(bpastene): Figure out how to set the date & time on user builds.
+    pass
+  else:
+    if not timeout_retry.WaitFor(
+        _set_and_verify_date, wait_period=1, max_tries=2):
+      raise device_errors.CommandFailedError(
+          'Failed to set date & time.', device_serial=str(device))
+
+  props = device.RunShellCommand('getprop', check_return=True)
+  for prop in props:
+    logging.info('  %s', prop)
+  if options.auto_reconnect:
+    _PushAndLaunchAdbReboot(device, options.target)
+
+
+def _UninstallIfMatch(device, pattern, app_to_keep):
+  installed_packages = device.RunShellCommand(['pm', 'list', 'packages'])
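+  # `pm list packages` lines look like "package:com.example.app"; the text
+  # after the colon is the package name.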
+  installed_system_packages = [
+      pkg.split(':')[1] for pkg in device.RunShellCommand(['pm', 'list',
+                                                           'packages', '-s'])]
+  for package_output in installed_packages:
+    package = package_output.split(":")[1]
+    if pattern.match(package) and not package == app_to_keep:
+      if not device.IsUserBuild() or package not in installed_system_packages:
+        device.Uninstall(package)
+
+
+def _WipeUnderDirIfMatch(device, path, pattern):
+  for filename in device.ListDirectory(path):
+    if pattern.match(filename):
+      _WipeFileOrDir(device, posixpath.join(path, filename))
+
+
+def _WipeFileOrDir(device, path):
+  if device.PathExists(path):
+    device.RunShellCommand(['rm', '-rf', path], check_return=True)
+
+
+def _PushAndLaunchAdbReboot(device, target):
+  """Pushes and launches the adb_reboot binary on the device.
+
+  Arguments:
+    device: The DeviceUtils instance for the device to which the adb_reboot
+            binary should be pushed.
+    target: The build target (e.g., Debug or Release), which helps in
+            locating the adb_reboot binary.
+  """
+  logging.info('Will push and launch adb_reboot on %s', str(device))
+  # Kill if adb_reboot is already running.
+  device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
+  # Push adb_reboot
+  logging.info('  Pushing adb_reboot ...')
+  adb_reboot = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                            'out/%s/adb_reboot' % target)
+  device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
+  # Launch adb_reboot
+  logging.info('  Launching adb_reboot ...')
+  device.RunShellCommand(
+      ['/data/local/tmp/adb_reboot'],
+      check_return=True)
+
+
+def _LaunchHostHeartbeat():
+  # Kill any existing host_heartbeat.
+  KillHostHeartbeat()
+  # Launch a new host_heartbeat
+  logging.info('Spawning host heartbeat...')
+  subprocess.Popen([os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                 'build/android/host_heartbeat.py')])
+
+def KillHostHeartbeat():
+  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
+  stdout, _ = ps.communicate()
+  matches = re.findall('\\n.*host_heartbeat.*', stdout)
+  for match in matches:
+    logging.info('An instance of host_heartbeat is running... will kill.')
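+    # The second whitespace-delimited field of a `ps aux` line is the PID.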
+    pid = re.findall(r'(\S+)', match)[1]
+    subprocess.call(['kill', str(pid)])
+
+def main():
+  # Recommended options on perf bots:
+  # --disable-network
+  #     TODO(tonyg): We eventually want network on. However, currently radios
+  #     can cause perfbots to drain faster than they charge.
+  # --min-battery-level 95
+  #     Some perf bots run benchmarks with USB charging disabled which leads
+  #     to gradual draining of the battery. We must wait for a full charge
+  #     before starting a run in order to keep the devices online.
+
+  parser = argparse.ArgumentParser(
+      description='Provision Android devices with settings required for bots.')
+  parser.add_argument('-d', '--device', metavar='SERIAL',
+                      help='the serial number of the device to be provisioned'
+                      ' (the default is to provision all devices attached)')
+  parser.add_argument('--adb-path',
+                      help='Absolute path to the adb binary to use.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
+                      dest='phases',
+                      help='Phases of provisioning to run. '
+                           '(If omitted, all phases will be run.)')
+  parser.add_argument('--skip-wipe', action='store_true', default=False,
+                      help="don't wipe device data during provisioning")
+  parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
+                      help='when wiping the device, max number of seconds to'
+                      ' wait after each reboot '
+                      '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
+  parser.add_argument('--min-battery-level', type=int, metavar='NUM',
+                      help='wait for the device to reach this minimum battery'
+                      ' level before trying to continue')
+  parser.add_argument('--disable-location', action='store_true',
+                      help='disable Google location services on devices')
+  parser.add_argument('--disable-mock-location', action='store_true',
+                      default=False, help='Set ALLOW_MOCK_LOCATION to false')
+  parser.add_argument('--disable-network', action='store_true',
+                      help='disable network access on devices')
+  parser.add_argument('--disable-java-debug', action='store_false',
+                      dest='enable_java_debug', default=True,
+                      help='disable Java property asserts and JNI checking')
+  parser.add_argument('--disable-system-chrome', action='store_true',
+                      help='Disable the system chrome from devices.')
+  parser.add_argument('--remove-system-webview', action='store_true',
+                      help='Remove the system webview from devices.')
+  parser.add_argument('-t', '--target', default='Debug',
+                      help='the build target (default: %(default)s)')
+  parser.add_argument('-r', '--auto-reconnect', action='store_true',
+                      help='push binary which will reboot the device on adb'
+                      ' disconnections')
+  parser.add_argument('--adb-key-files', type=str, nargs='+',
+                      help='list of adb keys to push to device')
+  parser.add_argument('-v', '--verbose', action='count', default=1,
+                      help='Log more information.')
+  parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
+                      help='Wait for the battery to have this temp or lower.')
+  parser.add_argument('--output-device-denylist',
+                      help='Json file to output the device denylist.')
+  parser.add_argument('--chrome-specific-wipe', action='store_true',
+                      help='only wipe chrome specific data during provisioning')
+  parser.add_argument('--emulators', action='store_true',
+                      help='provision only emulators and ignore usb devices')
+  args = parser.parse_args()
+  constants.SetBuildType(args.target)
+
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  devil_chromium.Initialize(adb_path=args.adb_path)
+
+  try:
+    return ProvisionDevices(args)
+  except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError):
+    logging.exception('Unable to provision local devices.')
+    return exit_codes.INFRA
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/pylib/__init__.py b/src/build/android/pylib/__init__.py
new file mode 100644
index 0000000..c9a4c03
--- /dev/null
+++ b/src/build/android/pylib/__init__.py
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+_THIRD_PARTY_PATH = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', '..', 'third_party'))
+
+_CATAPULT_PATH = os.path.join(_THIRD_PARTY_PATH, 'catapult')
+
+_DEVIL_PATH = os.path.join(_CATAPULT_PATH, 'devil')
+
+_PYTRACE_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_trace_event')
+
+_PY_UTILS_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_utils')
+
+_SIX_PATH = os.path.join(_THIRD_PARTY_PATH, 'six', 'src')
+
+_TRACE2HTML_PATH = os.path.join(_CATAPULT_PATH, 'tracing')
+
+
+if _DEVIL_PATH not in sys.path:
+  sys.path.append(_DEVIL_PATH)
+
+if _PYTRACE_PATH not in sys.path:
+  sys.path.append(_PYTRACE_PATH)
+
+if _PY_UTILS_PATH not in sys.path:
+  sys.path.append(_PY_UTILS_PATH)
+
+if _TRACE2HTML_PATH not in sys.path:
+  sys.path.append(_TRACE2HTML_PATH)
+
+if _SIX_PATH not in sys.path:
+  sys.path.append(_SIX_PATH)
diff --git a/src/build/android/pylib/android/__init__.py b/src/build/android/pylib/android/__init__.py
new file mode 100644
index 0000000..a67c350
--- /dev/null
+++ b/src/build/android/pylib/android/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/android/logcat_symbolizer.py b/src/build/android/pylib/android/logcat_symbolizer.py
new file mode 100644
index 0000000..720629b
--- /dev/null
+++ b/src/build/android/pylib/android/logcat_symbolizer.py
@@ -0,0 +1,98 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+from devil.android import logcat_monitor
+
+BACKTRACE_LINE_RE = re.compile(r'#\d+')
+THREADTIME_RE = re.compile(
+    logcat_monitor.LogcatMonitor.THREADTIME_RE_FORMAT % (
+        r' *\S* *', r' *\S* *', r' *\S* *', r' *\S* *', r'.*'))
+
+def SymbolizeLogcat(logcat, dest, symbolizer, abi):
+  """Symbolize stack trace in the logcat.
+
+  Symbolize the logcat and write the symbolized logcat to a new file.
+
+  Args:
+    logcat: Path to logcat file.
+    dest: Path to where to write the symbolized logcat.
+    symbolizer: The stack symbolizer to symbolize stack trace in logcat.
+    abi: The device's product_cpu_abi. Symbolizer needs it to symbolize.
+
+  A sample logcat that needs to be symbolized, after stripping the prefix,
+  such as '08-07 18:39:37.692 28649 28649 E Ion     : ', would be:
+  Build fingerprint: 'google/shamu/shamu:7.1.1/NMF20B/3370:userdebug/dev-keys'
+  Revision: '0'
+  ABI: 'arm'
+  pid: 28936, tid: 28936, name: chromium.chrome  >>> org.chromium.chrome <<<
+  signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------
+  Abort message: '[FATAL:debug_urls.cc(151)] Check failed: false.
+  #00 0x63e16c41 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0006cc4
+  #01 0x63f19be3 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016fbe
+  #02 0x63f19737 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016f73
+  #03 0x63f18ddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016edd
+  #04 0x63f18b79 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016eb7
+  #05 0xab53f319 /system/lib/libart.so+0x000a3319
+  #06
+     r0 00000000  r1 00007108  r2 00000006  r3 00000008
+     r4 ae60258c  r5 00000006  r6 ae602534  r7 0000010c
+     r8 bede5cd0  r9 00000030  sl 00000000  fp 9265a800
+     ip 0000000b  sp bede5c38  lr ac8e5537  pc ac8e7da0  cpsr 600f0010
+
+  backtrace:
+     #00 pc 00049da0  /system/lib/libc.so (tgkill+12)
+     #01 pc 00047533  /system/lib/libc.so (pthread_kill+34)
+     #02 pc 0001d635  /system/lib/libc.so (raise+10)
+     #03 pc 00019181  /system/lib/libc.so (__libc_android_abort+34)
+     #04 pc 00017048  /system/lib/libc.so (abort+4)
+     #05 pc 00948605  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #06 pc 002c9f73  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #07 pc 003ccbe1  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #08 pc 003cc735  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #09 pc 003cbddf  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #10 pc 003cbb77  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+  """
+
+  with open(logcat) as logcat_file:
+    with open(dest, 'w') as dest_file:
+      # The current stack script will only print out the symbolized stack
+      # and completely ignore logs other than the crash log that is used
+      # for symbolization, if any exists. Thus the code here extracts the
+      # crash log inside the logcat and passes only the crash log to the
+      # script, because we don't want to lose other information in the
+      # logcat that, if passed to the stack script, would just be ignored.
+      # TODO(crbug.com/755225): Rewrite the logic here.
+      outside_of_crash_log = True
+      in_lower_half_crash = False
+      data_to_symbolize = []
+
+      for line in logcat_file:
+        if outside_of_crash_log:
+          # Check whether it is the start of crash log.
+          if 'Build fingerprint: ' in line:
+            outside_of_crash_log = False
+            # Only include necessary information for symbolization.
+            # The logic here that removes date, time, proc_id etc.
+            # should be in sync with _THREADTIME_RE_FORMAT in logcat_monitor.
+            data_to_symbolize.append(
+              re.search(THREADTIME_RE, line).group(7))
+          else:
+            dest_file.write(line)
+        else:
+          # Once we have reached the end of the backtrace section,
+          # we will start symbolizing.
+          if in_lower_half_crash and not bool(BACKTRACE_LINE_RE.search(line)):
+            outside_of_crash_log = True
+            in_lower_half_crash = False
+            symbolized_lines = symbolizer.ExtractAndResolveNativeStackTraces(
+                data_to_symbolize, abi)
+            dest_file.write('\n'.join(symbolized_lines) + '\n' + line)
+            data_to_symbolize = []
+          else:
+            if not in_lower_half_crash and 'backtrace:' in line:
+              in_lower_half_crash = True
+            data_to_symbolize.append(
+                re.search(THREADTIME_RE, line).group(7))
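+
+
+# A minimal usage sketch; the paths are hypothetical and |symbolizer| is
+# assumed to expose ExtractAndResolveNativeStackTraces() as used above.
+def _ExampleUsage(symbolizer):
+  SymbolizeLogcat('/tmp/logcat.txt', '/tmp/logcat.symbolized.txt',
+                  symbolizer, 'arm')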
diff --git a/src/build/android/pylib/base/__init__.py b/src/build/android/pylib/base/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/pylib/base/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/base/base_test_result.py b/src/build/android/pylib/base/base_test_result.py
new file mode 100644
index 0000000..03f00f2
--- /dev/null
+++ b/src/build/android/pylib/base/base_test_result.py
@@ -0,0 +1,264 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing base test results classes."""
+
+from __future__ import absolute_import
+import threading
+import six
+
+
+class ResultType(object):
+  """Class enumerating test types."""
+  # The test passed.
+  PASS = 'SUCCESS'
+
+  # The test was intentionally skipped.
+  SKIP = 'SKIPPED'
+
+  # The test failed.
+  FAIL = 'FAILURE'
+
+  # The test caused the containing process to crash.
+  CRASH = 'CRASH'
+
+  # The test timed out.
+  TIMEOUT = 'TIMEOUT'
+
+  # The test ran, but we couldn't determine what happened.
+  UNKNOWN = 'UNKNOWN'
+
+  # The test did not run.
+  NOTRUN = 'NOTRUN'
+
+  @staticmethod
+  def GetTypes():
+    """Get a list of all test types."""
+    return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL,
+            ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN,
+            ResultType.NOTRUN]
+
+
+class BaseTestResult(object):
+  """Base class for a single test result."""
+
+  def __init__(self, name, test_type, duration=0, log=''):
+    """Construct a BaseTestResult.
+
+    Args:
+      name: Name of the test which defines uniqueness.
+      test_type: Type of the test result as defined in ResultType.
+      duration: Time it took for the test to run in milliseconds.
+      log: An optional string listing any errors.
+    """
+    assert name
+    assert test_type in ResultType.GetTypes()
+    self._name = name
+    self._test_type = test_type
+    self._duration = duration
+    self._log = log
+    self._links = {}
+
+  def __str__(self):
+    return self._name
+
+  def __repr__(self):
+    return self._name
+
+  def __cmp__(self, other):
+    # pylint: disable=W0212
+    return cmp(self._name, other._name)
+
+  def __hash__(self):
+    return hash(self._name)
+
+  def SetName(self, name):
+    """Set the test name.
+
+    Because we're putting this into a set, this should only be used if moving
+    this test result into another set.
+    """
+    self._name = name
+
+  def GetName(self):
+    """Get the test name."""
+    return self._name
+
+  def SetType(self, test_type):
+    """Set the test result type."""
+    assert test_type in ResultType.GetTypes()
+    self._test_type = test_type
+
+  def GetType(self):
+    """Get the test result type."""
+    return self._test_type
+
+  def GetDuration(self):
+    """Get the test duration."""
+    return self._duration
+
+  def SetLog(self, log):
+    """Set the test log."""
+    self._log = log
+
+  def GetLog(self):
+    """Get the test log."""
+    return self._log
+
+  def SetLink(self, name, link_url):
+    """Set link with test result data."""
+    self._links[name] = link_url
+
+  def GetLinks(self):
+    """Get dict containing links to test result data."""
+    return self._links
+
+
+class TestRunResults(object):
+  """Set of results for a test run."""
+
+  def __init__(self):
+    self._links = {}
+    self._results = set()
+    self._results_lock = threading.RLock()
+
+  def SetLink(self, name, link_url):
+    """Add link with test run results data."""
+    self._links[name] = link_url
+
+  def GetLinks(self):
+    """Get dict containing links to test run result data."""
+    return self._links
+
+  def GetLogs(self):
+    """Get the string representation of all test logs."""
+    with self._results_lock:
+      s = []
+      for test_type in ResultType.GetTypes():
+        if test_type != ResultType.PASS:
+          for t in sorted(self._GetType(test_type)):
+            log = t.GetLog()
+            if log:
+              s.append('[%s] %s:' % (test_type, t))
+              s.append(six.text_type(log, 'utf-8'))
+      return '\n'.join(s)
+
+  def GetGtestForm(self):
+    """Get the gtest string representation of this object."""
+    with self._results_lock:
+      s = []
+      plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s)
+      tests = lambda n: plural(n, 'test', 'tests')
+
+      s.append('[==========] %s ran.' % (tests(len(self.GetAll()))))
+      s.append('[  PASSED  ] %s.' % (tests(len(self.GetPass()))))
+
+      skipped = self.GetSkip()
+      if skipped:
+        s.append('[  SKIPPED ] Skipped %s, listed below:' % tests(len(skipped)))
+        for t in sorted(skipped):
+          s.append('[  SKIPPED ] %s' % str(t))
+
+      all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(),
+          self.GetUnknown())
+      if all_failures:
+        s.append('[  FAILED  ] %s, listed below:' % tests(len(all_failures)))
+        for t in sorted(self.GetFail()):
+          s.append('[  FAILED  ] %s' % str(t))
+        for t in sorted(self.GetCrash()):
+          s.append('[  FAILED  ] %s (CRASHED)' % str(t))
+        for t in sorted(self.GetTimeout()):
+          s.append('[  FAILED  ] %s (TIMEOUT)' % str(t))
+        for t in sorted(self.GetUnknown()):
+          s.append('[  FAILED  ] %s (UNKNOWN)' % str(t))
+        s.append('')
+        s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS'))
+      return '\n'.join(s)
+
+  def GetShortForm(self):
+    """Get the short string representation of this object."""
+    with self._results_lock:
+      s = []
+      s.append('ALL: %d' % len(self._results))
+      for test_type in ResultType.GetTypes():
+        s.append('%s: %d' % (test_type, len(self._GetType(test_type))))
+      return ''.join([x.ljust(15) for x in s])
+
+  def __str__(self):
+    return self.GetGtestForm()
+
+  def AddResult(self, result):
+    """Add |result| to the set.
+
+    Args:
+      result: An instance of BaseTestResult.
+    """
+    assert isinstance(result, BaseTestResult)
+    with self._results_lock:
+      self._results.discard(result)
+      self._results.add(result)
+
+  def AddResults(self, results):
+    """Add |results| to the set.
+
+    Args:
+      results: An iterable of BaseTestResult objects.
+    """
+    with self._results_lock:
+      for t in results:
+        self.AddResult(t)
+
+  def AddTestRunResults(self, results):
+    """Add the set of test results from |results|.
+
+    Args:
+      results: An instance of TestRunResults.
+    """
+    assert isinstance(results, TestRunResults), (
+           'Expected TestRunResults object: %s' % type(results))
+    with self._results_lock:
+      # pylint: disable=W0212
+      self._results.update(results._results)
+
+  def GetAll(self):
+    """Get the set of all test results."""
+    with self._results_lock:
+      return self._results.copy()
+
+  def _GetType(self, test_type):
+    """Get the set of test results with the given test type."""
+    with self._results_lock:
+      return set(t for t in self._results if t.GetType() == test_type)
+
+  def GetPass(self):
+    """Get the set of all passed test results."""
+    return self._GetType(ResultType.PASS)
+
+  def GetSkip(self):
+    """Get the set of all skipped test results."""
+    return self._GetType(ResultType.SKIP)
+
+  def GetFail(self):
+    """Get the set of all failed test results."""
+    return self._GetType(ResultType.FAIL)
+
+  def GetCrash(self):
+    """Get the set of all crashed test results."""
+    return self._GetType(ResultType.CRASH)
+
+  def GetTimeout(self):
+    """Get the set of all timed out test results."""
+    return self._GetType(ResultType.TIMEOUT)
+
+  def GetUnknown(self):
+    """Get the set of all unknown test results."""
+    return self._GetType(ResultType.UNKNOWN)
+
+  def GetNotPass(self):
+    """Get the set of all non-passed test results."""
+    return self.GetAll() - self.GetPass()
+
+  def DidRunPass(self):
+    """Return whether the test run was successful."""
+    return not self.GetNotPass() - self.GetSkip()
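+
+
+# A minimal usage sketch; the test names are hypothetical. It shows how
+# results are typically accumulated and how DidRunPass() treats skips.
+def _ExampleUsage():
+  results = TestRunResults()
+  results.AddResult(
+      BaseTestResult('SampleSuite.testPass', ResultType.PASS, duration=12))
+  results.AddResult(BaseTestResult('SampleSuite.testSkip', ResultType.SKIP))
+  # A run passes when every test either passed or was skipped.
+  assert results.DidRunPass()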
diff --git a/src/build/android/pylib/base/base_test_result_unittest.py b/src/build/android/pylib/base/base_test_result_unittest.py
new file mode 100644
index 0000000..31a1f60
--- /dev/null
+++ b/src/build/android/pylib/base/base_test_result_unittest.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for TestRunResults."""
+
+from __future__ import absolute_import
+import unittest
+
+from pylib.base.base_test_result import BaseTestResult
+from pylib.base.base_test_result import TestRunResults
+from pylib.base.base_test_result import ResultType
+
+
+class TestTestRunResults(unittest.TestCase):
+  def setUp(self):
+    self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    self.p2 = BaseTestResult('p2', ResultType.PASS)
+    self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
+    self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
+    self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
+    self.tr = TestRunResults()
+    self.tr.AddResult(self.p1)
+    self.tr.AddResult(other_p1)
+    self.tr.AddResult(self.p2)
+    self.tr.AddResults(set([self.f1, self.c1, self.u1]))
+
+  def testGetAll(self):
+    self.assertFalse(
+        self.tr.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1]))
+
+  def testGetPass(self):
+    self.assertFalse(self.tr.GetPass().symmetric_difference(
+        [self.p1, self.p2]))
+
+  def testGetNotPass(self):
+    self.assertFalse(self.tr.GetNotPass().symmetric_difference(
+        [self.f1, self.c1, self.u1]))
+
+  def testGetAddTestRunResults(self):
+    tr2 = TestRunResults()
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    f2 = BaseTestResult('f2', ResultType.FAIL)
+    tr2.AddResult(other_p1)
+    tr2.AddResult(f2)
+    tr2.AddTestRunResults(self.tr)
+    self.assertFalse(
+        tr2.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))
+
+  def testGetLogs(self):
+    log_print = ('[FAIL] f1:\n'
+                 'failure1\n'
+                 '[CRASH] c1:\n'
+                 'crash1')
+    self.assertEqual(self.tr.GetLogs(), log_print)
+
+  def testGetShortForm(self):
+    short_print = ('ALL: 5         PASS: 2        FAIL: 1        '
+                   'CRASH: 1       TIMEOUT: 0     UNKNOWN: 1     ')
+    self.assertEqual(self.tr.GetShortForm(), short_print)
+
+  def testGetGtestForm(self):
+    gtest_print = ('[==========] 5 tests ran.\n'
+                   '[  PASSED  ] 2 tests.\n'
+                   '[  FAILED  ] 3 tests, listed below:\n'
+                   '[  FAILED  ] f1\n'
+                   '[  FAILED  ] c1 (CRASHED)\n'
+                   '[  FAILED  ] u1 (UNKNOWN)\n'
+                   '\n'
+                   '3 FAILED TESTS')
+    self.assertEqual(gtest_print, self.tr.GetGtestForm())
+
+  def testRunPassed(self):
+    self.assertFalse(self.tr.DidRunPass())
+    tr2 = TestRunResults()
+    self.assertTrue(tr2.DidRunPass())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/base/environment.py b/src/build/android/pylib/base/environment.py
new file mode 100644
index 0000000..744c392
--- /dev/null
+++ b/src/build/android/pylib/base/environment.py
@@ -0,0 +1,49 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Environment(object):
+  """An environment in which tests can be run.
+
+  This is expected to handle all logic that is applicable to an entire specific
+  environment but is independent of the test type.
+
+  Examples include:
+    - The local device environment, for running tests on devices attached to
+      the local machine.
+    - The local machine environment, for running tests directly on the local
+      machine.
+  """
+
+  def __init__(self, output_manager):
+    """Environment constructor.
+
+    Args:
+      output_manager: Instance of |output_manager.OutputManager| used to
+          save test output.
+    """
+    self._output_manager = output_manager
+
+    # Some subclasses have different teardown behavior on receiving SIGTERM.
+    self._received_sigterm = False
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
+  @property
+  def output_manager(self):
+    return self._output_manager
+
+  def ReceivedSigterm(self):
+    self._received_sigterm = True
diff --git a/src/build/android/pylib/base/environment_factory.py b/src/build/android/pylib/base/environment_factory.py
new file mode 100644
index 0000000..2ff93f3
--- /dev/null
+++ b/src/build/android/pylib/base/environment_factory.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib import constants
+from pylib.local.device import local_device_environment
+from pylib.local.machine import local_machine_environment
+
+try:
+  # local_emulator_environment depends on //tools.
+  # If a client pulls in the //build subtree but not the //tools
+  # one, fail at emulator environment creation time.
+  from pylib.local.emulator import local_emulator_environment
+except ImportError:
+  local_emulator_environment = None
+
+
+def CreateEnvironment(args, output_manager, error_func):
+
+  if args.environment == 'local':
+    if args.command not in constants.LOCAL_MACHINE_TESTS:
+      if args.avd_config:
+        if not local_emulator_environment:
+          error_func('emulator environment requested but not available.')
+        return local_emulator_environment.LocalEmulatorEnvironment(
+            args, output_manager, error_func)
+      return local_device_environment.LocalDeviceEnvironment(
+          args, output_manager, error_func)
+    else:
+      return local_machine_environment.LocalMachineEnvironment(
+          args, output_manager, error_func)
+
+  error_func('Unable to create %s environment.' % args.environment)
diff --git a/src/build/android/pylib/base/mock_environment.py b/src/build/android/pylib/base/mock_environment.py
new file mode 100644
index 0000000..d7293c7
--- /dev/null
+++ b/src/build/android/pylib/base/mock_environment.py
@@ -0,0 +1,11 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.base import environment
+
+import mock  # pylint: disable=import-error
+
+
+MockEnvironment = mock.MagicMock(environment.Environment)
diff --git a/src/build/android/pylib/base/mock_test_instance.py b/src/build/android/pylib/base/mock_test_instance.py
new file mode 100644
index 0000000..19a1d7e
--- /dev/null
+++ b/src/build/android/pylib/base/mock_test_instance.py
@@ -0,0 +1,11 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.base import test_instance
+
+import mock  # pylint: disable=import-error
+
+
+MockTestInstance = mock.MagicMock(test_instance.TestInstance)
diff --git a/src/build/android/pylib/base/output_manager.py b/src/build/android/pylib/base/output_manager.py
new file mode 100644
index 0000000..53e5aea
--- /dev/null
+++ b/src/build/android/pylib/base/output_manager.py
@@ -0,0 +1,159 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import logging
+import os
+import tempfile
+
+from devil.utils import reraiser_thread
+
+
+class Datatype(object):
+  HTML = 'text/html'
+  JSON = 'application/json'
+  PNG = 'image/png'
+  TEXT = 'text/plain'
+
+
+class OutputManager(object):
+
+  def __init__(self):
+    """OutputManager Constructor.
+
+    This class provides a simple interface to save test output. Subclasses
+    of this will allow users to save test results in the cloud or locally.
+    """
+    self._allow_upload = False
+    self._thread_group = None
+
+  @contextlib.contextmanager
+  def ArchivedTempfile(
+      self, out_filename, out_subdir, datatype=Datatype.TEXT):
+    """Archive file contents asynchonously and then deletes file.
+
+    Args:
+      out_filename: Name for saved file.
+      out_subdir: Directory to save |out_filename| to.
+      datatype: Datatype of file.
+
+    Returns:
+      An ArchivedFile instance. The file is uploaded asynchronously when the
+      context manager exits. AFTER the context manager exits, you can get the
+      link to where the file will be stored using the Link() API. You can use
+      typical file APIs to write and flush the ArchivedFile. You can also use
+      file.name to get the local filepath of the underlying file; if you do,
+      you are responsible for flushing the file before exiting the context
+      manager.
+    """
+    if not self._allow_upload:
+      raise Exception('Must run |SetUp| before attempting to upload!')
+
+    f = self._CreateArchivedFile(out_filename, out_subdir, datatype)
+    try:
+      yield f
+    finally:
+      f.PrepareArchive()
+
+      def archive():
+        try:
+          f.Archive()
+        finally:
+          f.Delete()
+
+      thread = reraiser_thread.ReraiserThread(func=archive)
+      thread.start()
+      self._thread_group.Add(thread)
+
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    """Returns an instance of ArchivedFile."""
+    raise NotImplementedError
+
+  def SetUp(self):
+    self._allow_upload = True
+    self._thread_group = reraiser_thread.ReraiserThreadGroup()
+
+  def TearDown(self):
+    self._allow_upload = False
+    logging.info('Finishing archiving output.')
+    self._thread_group.JoinAll()
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
+
+class ArchivedFile(object):
+
+  def __init__(self, out_filename, out_subdir, datatype):
+    self._out_filename = out_filename
+    self._out_subdir = out_subdir
+    self._datatype = datatype
+
+    self._f = tempfile.NamedTemporaryFile(delete=False)
+    self._ready_to_archive = False
+
+  @property
+  def name(self):
+    return self._f.name
+
+  def write(self, *args, **kwargs):
+    if self._ready_to_archive:
+      raise Exception('Cannot write to file after archiving has begun!')
+    self._f.write(*args, **kwargs)
+
+  def flush(self, *args, **kwargs):
+    if self._ready_to_archive:
+      raise Exception('Cannot flush file after archiving has begun!')
+    self._f.flush(*args, **kwargs)
+
+  def Link(self):
+    """Returns location of archived file."""
+    if not self._ready_to_archive:
+      raise Exception('Cannot get link to archived file before archiving '
+                      'has begun')
+    return self._Link()
+
+  def _Link(self):
+    """Note for when overriding this function.
+
+    This function may be called before the file has finished being archived.
+    It therefore needs to be able to determine the exact location of the
+    archived file before archiving completes.
+    """
+    raise NotImplementedError
+
+  def PrepareArchive(self):
+    """Meant to be called synchronously to prepare file for async archiving."""
+    self.flush()
+    self._ready_to_archive = True
+    self._PrepareArchive()
+
+  def _PrepareArchive(self):
+    """Note for when overriding this function.
+
+    This function is needed for things such as computing the location of
+    content addressed files. This is called after the file is written but
+    before archiving has begun.
+    """
+    pass
+
+  def Archive(self):
+    """Archives file."""
+    if not self._ready_to_archive:
+      raise Exception('File is not ready to archive. Be sure you are not '
+                      'writing to the file and PrepareArchive has been called')
+    self._Archive()
+
+  def _Archive(self):
+    raise NotImplementedError
+
+  def Delete(self):
+    """Deletes the backing file."""
+    self._f.close()
+    os.remove(self.name)
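+
+
+# A minimal sketch of the intended calling pattern, assuming |manager| is an
+# instance of a concrete OutputManager subclass; the filename and subdir are
+# hypothetical.
+def _ExampleUsage(manager):
+  with manager:
+    with manager.ArchivedTempfile('logcat.txt', 'logs', Datatype.TEXT) as f:
+      f.write(b'example log contents')
+    # Link() is only valid once the ArchivedTempfile context has exited.
+    return f.Link()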
diff --git a/src/build/android/pylib/base/output_manager_factory.py b/src/build/android/pylib/base/output_manager_factory.py
new file mode 100644
index 0000000..891692d
--- /dev/null
+++ b/src/build/android/pylib/base/output_manager_factory.py
@@ -0,0 +1,18 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib import constants
+from pylib.output import local_output_manager
+from pylib.output import remote_output_manager
+from pylib.utils import local_utils
+
+
+def CreateOutputManager(args):
+  if args.local_output or not local_utils.IsOnSwarming():
+    return local_output_manager.LocalOutputManager(
+        output_dir=constants.GetOutDirectory())
+  else:
+    return remote_output_manager.RemoteOutputManager(
+        bucket=args.gs_results_bucket)
diff --git a/src/build/android/pylib/base/output_manager_test_case.py b/src/build/android/pylib/base/output_manager_test_case.py
new file mode 100644
index 0000000..7b7e462
--- /dev/null
+++ b/src/build/android/pylib/base/output_manager_test_case.py
@@ -0,0 +1,15 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import os.path
+import unittest
+
+
+class OutputManagerTestCase(unittest.TestCase):
+
+  def assertUsableTempFile(self, archived_tempfile):
+    self.assertTrue(bool(archived_tempfile.name))
+    self.assertTrue(os.path.exists(archived_tempfile.name))
+    self.assertTrue(os.path.isfile(archived_tempfile.name))
diff --git a/src/build/android/pylib/base/result_sink.py b/src/build/android/pylib/base/result_sink.py
new file mode 100644
index 0000000..424b873
--- /dev/null
+++ b/src/build/android/pylib/base/result_sink.py
@@ -0,0 +1,163 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+import base64
+import cgi
+import json
+import os
+
+import six
+if not six.PY2:
+  import html  # pylint: disable=import-error
+
+from pylib.base import base_test_result
+import requests  # pylint: disable=import-error
+
+# Comes from luci/resultdb/pbutil/test_result.go
+MAX_REPORT_LEN = 4 * 1024
+
+# Maps base_test_results to the luci test-result.proto.
+# https://godoc.org/go.chromium.org/luci/resultdb/proto/v1#TestStatus
+RESULT_MAP = {
+    base_test_result.ResultType.UNKNOWN: 'ABORT',
+    base_test_result.ResultType.PASS: 'PASS',
+    base_test_result.ResultType.FAIL: 'FAIL',
+    base_test_result.ResultType.CRASH: 'CRASH',
+    base_test_result.ResultType.TIMEOUT: 'ABORT',
+    base_test_result.ResultType.SKIP: 'SKIP',
+    base_test_result.ResultType.NOTRUN: 'SKIP',
+}
+
+
+def TryInitClient():
+  """Tries to initialize a result_sink_client object.
+
+  Assumes that rdb stream is already running.
+
+  Returns:
+    A ResultSinkClient for the result_sink server, or None if LUCI_CONTEXT
+    contains no result_sink configuration.
+  """
+  try:
+    with open(os.environ['LUCI_CONTEXT']) as f:
+      sink = json.load(f)['result_sink']
+      return ResultSinkClient(sink)
+  except KeyError:
+    return None
+
+
+class ResultSinkClient(object):
+  """A class to store the sink's post configurations and make post requests.
+
+  This assumes that the rdb stream has been called already and that the
+  server is listening.
+  """
+  def __init__(self, context):
+    base_url = 'http://%s/prpc/luci.resultsink.v1.Sink' % context['address']
+    self.test_results_url = base_url + '/ReportTestResults'
+    self.report_artifacts_url = base_url + '/ReportInvocationLevelArtifacts'
+
+    self.headers = {
+        'Content-Type': 'application/json',
+        'Accept': 'application/json',
+        'Authorization': 'ResultSink %s' % context['auth_token'],
+    }
+
+  def Post(self, test_id, status, duration, test_log, test_file,
+           artifacts=None):
+    """Uploads the test result to the ResultSink server.
+
+    This assumes that the rdb stream has been called already and that the
+    server is ready and listening.
+
+    Args:
+      test_id: A string representing the test's name.
+      status: A string representing if the test passed, failed, etc...
+      duration: An int representing time in ms.
+      test_log: A string representing the test's output.
+      test_file: A string representing the file location of the test.
+      artifacts: An optional dict of artifacts to attach to the test.
+
+    Returns:
+      N/A
+    """
+    assert status in RESULT_MAP
+    expected = status in (base_test_result.ResultType.PASS,
+                          base_test_result.ResultType.SKIP)
+    result_db_status = RESULT_MAP[status]
+
+    # Slightly smaller to allow addition of <pre> tags and message.
+    report_check_size = MAX_REPORT_LEN - 45
+    if six.PY2:
+      test_log_escaped = cgi.escape(test_log)
+    else:
+      test_log_escaped = html.escape(test_log)
+    if len(test_log_escaped) > report_check_size:
+      test_log_formatted = ('<pre>' + test_log_escaped[:report_check_size] +
+                            '...Full output in Artifact.</pre>')
+    else:
+      test_log_formatted = '<pre>' + test_log_escaped + '</pre>'
+
+    tr = {
+        'expected':
+        expected,
+        'status':
+        result_db_status,
+        'summaryHtml':
+        test_log_formatted,
+        'tags': [
+            {
+                'key': 'test_name',
+                'value': test_id,
+            },
+            {
+                # Status before getting mapped to result_db statuses.
+                'key': 'android_test_runner_status',
+                'value': status,
+            }
+        ],
+        'testId':
+        test_id,
+    }
+    artifacts = artifacts or {}
+    if len(test_log_escaped) > report_check_size:
+      # Upload the original log without any modifications.
+      b64_log = six.ensure_str(base64.b64encode(six.ensure_binary(test_log)))
+      artifacts.update({'Test Log': {'contents': b64_log}})
+    if artifacts:
+      tr['artifacts'] = artifacts
+
+    if duration is not None:
+      # Duration must be formatted to avoid scientific notation in case the
+      # number is too small or too large. ResultDB takes seconds, not ms.
+      # The division is wrapped in float(...) because % binds tighter than
+      # /, so '%.9fs' % duration / 1000.0 would format first, then divide.
+      tr['duration'] = '%.9fs' % float(duration / 1000.0)
+
+    if test_file and str(test_file).startswith('//'):
+      tr['testMetadata'] = {
+          'name': test_id,
+          'location': {
+              'file_name': test_file,
+              'repo': 'https://chromium.googlesource.com/chromium/src',
+          }
+      }
+
+    res = requests.post(url=self.test_results_url,
+                        headers=self.headers,
+                        data=json.dumps({'testResults': [tr]}))
+    res.raise_for_status()
+
+  def ReportInvocationLevelArtifacts(self, artifacts):
+    """Uploads invocation-level artifacts to the ResultSink server.
+
+    This is for artifacts that don't apply to a single test but to the test
+    invocation as a whole (eg: system logs).
+
+    Args:
+      artifacts: A dict of artifacts to attach to the invocation.
+    """
+    req = {'artifacts': artifacts}
+    res = requests.post(url=self.report_artifacts_url,
+                        headers=self.headers,
+                        data=json.dumps(req))
+    res.raise_for_status()
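+
+
+# A minimal usage sketch, assuming an `rdb stream` session has populated
+# LUCI_CONTEXT; the test id, duration, log and file path are hypothetical.
+def _ExampleUsage():
+  client = TryInitClient()
+  if not client:
+    return  # Not running under an rdb stream session.
+  client.Post('SampleSuite.testFoo',
+              base_test_result.ResultType.PASS,
+              duration=1250,
+              test_log='OK',
+              test_file='//sample/sample_test.py')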
diff --git a/src/build/android/pylib/base/test_collection.py b/src/build/android/pylib/base/test_collection.py
new file mode 100644
index 0000000..83b3bf8
--- /dev/null
+++ b/src/build/android/pylib/base/test_collection.py
@@ -0,0 +1,81 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import threading
+
+class TestCollection(object):
+  """A threadsafe collection of tests.
+
+  Args:
+    tests: List of tests to put in the collection.
+  """
+
+  def __init__(self, tests=None):
+    if not tests:
+      tests = []
+    self._lock = threading.Lock()
+    self._tests = []
+    self._tests_in_progress = 0
+    # Used to signal that an item is available or all items have been handled.
+    self._item_available_or_all_done = threading.Event()
+    for t in tests:
+      self.add(t)
+
+  def _pop(self):
+    """Pop a test from the collection.
+
+    Waits until a test is available or all tests have been handled.
+
+    Returns:
+      A test or None if all tests have been handled.
+    """
+    while True:
+      # Wait for a test to be available or all tests to have been handled.
+      self._item_available_or_all_done.wait()
+      with self._lock:
+        # Check which of the two conditions triggered the signal.
+        if self._tests_in_progress == 0:
+          return None
+        try:
+          return self._tests.pop(0)
+        except IndexError:
+          # Another thread beat us to the available test, wait again.
+          self._item_available_or_all_done.clear()
+
+  def add(self, test):
+    """Add a test to the collection.
+
+    Args:
+      test: A test to add.
+    """
+    with self._lock:
+      self._tests.append(test)
+      self._item_available_or_all_done.set()
+      self._tests_in_progress += 1
+
+  def test_completed(self):
+    """Indicate that a test has been fully handled."""
+    with self._lock:
+      self._tests_in_progress -= 1
+      if self._tests_in_progress == 0:
+        # All tests have been handled, signal all waiting threads.
+        self._item_available_or_all_done.set()
+
+  def __iter__(self):
+    """Iterate through tests in the collection until all have been handled."""
+    while True:
+      r = self._pop()
+      if r is None:
+        break
+      yield r
+
+  def __len__(self):
+    """Return the number of tests currently in the collection."""
+    return len(self._tests)
+
+  def test_names(self):
+    """Return a list of the names of the tests currently in the collection."""
+    with self._lock:
+      return list(t.test for t in self._tests)
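+
+
+# A minimal sketch of draining a collection from two worker threads; the
+# |handler| callable is hypothetical. Each test must be marked via
+# test_completed() once fully handled, or __iter__ never terminates.
+def _ExampleUsage(tests, handler):
+  collection = TestCollection(tests)
+
+  def worker():
+    for test in collection:
+      try:
+        handler(test)
+      finally:
+        collection.test_completed()
+
+  workers = [threading.Thread(target=worker) for _ in range(2)]
+  for w in workers:
+    w.start()
+  for w in workers:
+    w.join()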
diff --git a/src/build/android/pylib/base/test_exception.py b/src/build/android/pylib/base/test_exception.py
new file mode 100644
index 0000000..c98d2cb
--- /dev/null
+++ b/src/build/android/pylib/base/test_exception.py
@@ -0,0 +1,8 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestException(Exception):
+  """Base class for exceptions thrown by the test runner."""
+  pass
diff --git a/src/build/android/pylib/base/test_instance.py b/src/build/android/pylib/base/test_instance.py
new file mode 100644
index 0000000..7b1099c
--- /dev/null
+++ b/src/build/android/pylib/base/test_instance.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestInstance(object):
+  """A type of test.
+
+  This is expected to handle all logic that is test-type specific but
+  independent of the environment or device.
+
+  Examples include:
+    - gtests
+    - instrumentation tests
+  """
+
+  def __init__(self):
+    pass
+
+  def TestType(self):
+    raise NotImplementedError
+
+  # pylint: disable=no-self-use
+  def GetPreferredAbis(self):
+    return None
+
+  # pylint: enable=no-self-use
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
diff --git a/src/build/android/pylib/base/test_instance_factory.py b/src/build/android/pylib/base/test_instance_factory.py
new file mode 100644
index 0000000..f47242a
--- /dev/null
+++ b/src/build/android/pylib/base/test_instance_factory.py
@@ -0,0 +1,26 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.utils import device_dependencies
+
+
+def CreateTestInstance(args, error_func):
+
+  if args.command == 'gtest':
+    return gtest_test_instance.GtestTestInstance(
+        args, device_dependencies.GetDataDependencies, error_func)
+  elif args.command == 'instrumentation':
+    return instrumentation_test_instance.InstrumentationTestInstance(
+        args, device_dependencies.GetDataDependencies, error_func)
+  elif args.command == 'junit':
+    return junit_test_instance.JunitTestInstance(args, error_func)
+  elif args.command == 'monkey':
+    return monkey_test_instance.MonkeyTestInstance(args, error_func)
+
+  error_func('Unable to create %s test instance.' % args.command)
diff --git a/src/build/android/pylib/base/test_run.py b/src/build/android/pylib/base/test_run.py
new file mode 100644
index 0000000..fc72d3a
--- /dev/null
+++ b/src/build/android/pylib/base/test_run.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestRun(object):
+  """An execution of a particular test on a particular device.
+
+  This is expected to handle all logic that is specific to the combination of
+  environment and test type.
+
+  Examples include:
+    - local gtests
+    - local instrumentation tests
+  """
+
+  def __init__(self, env, test_instance):
+    self._env = env
+    self._test_instance = test_instance
+
+    # Some subclasses have different teardown behavior on receiving SIGTERM.
+    self._received_sigterm = False
+
+  def TestPackage(self):
+    raise NotImplementedError
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def RunTests(self, results):
+    """Runs Tests and populates |results|.
+
+    Args:
+      results: An array that should be populated with
+               |base_test_result.TestRunResults| objects.
+    """
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    self.TearDown()
+
+  def ReceivedSigterm(self):
+    self._received_sigterm = True
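+
+
+# A minimal sketch of how the environment, test instance and test run nest
+# as context managers; |env|, |test_instance| and |test_run| are assumed to
+# be concrete subclasses created by the corresponding factories, and
+# |results| is a list to be populated with TestRunResults objects.
+def _ExampleUsage(env, test_instance, test_run, results):
+  with env, test_instance, test_run:
+    test_run.RunTests(results)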
diff --git a/src/build/android/pylib/base/test_run_factory.py b/src/build/android/pylib/base/test_run_factory.py
new file mode 100644
index 0000000..35d5494
--- /dev/null
+++ b/src/build/android/pylib/base/test_run_factory.py
@@ -0,0 +1,36 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_gtest_run
+from pylib.local.device import local_device_instrumentation_test_run
+from pylib.local.device import local_device_monkey_test_run
+from pylib.local.machine import local_machine_environment
+from pylib.local.machine import local_machine_junit_test_run
+
+
+def CreateTestRun(env, test_instance, error_func):
+  if isinstance(env, local_device_environment.LocalDeviceEnvironment):
+    if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+      return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance)
+    if isinstance(test_instance,
+                  instrumentation_test_instance.InstrumentationTestInstance):
+      return (local_device_instrumentation_test_run
+              .LocalDeviceInstrumentationTestRun(env, test_instance))
+    if isinstance(test_instance, monkey_test_instance.MonkeyTestInstance):
+      return (local_device_monkey_test_run
+              .LocalDeviceMonkeyTestRun(env, test_instance))
+
+  if isinstance(env, local_machine_environment.LocalMachineEnvironment):
+    if isinstance(test_instance, junit_test_instance.JunitTestInstance):
+      return (local_machine_junit_test_run
+              .LocalMachineJunitTestRun(env, test_instance))
+
+  error_func('Unable to create test run for %s tests in %s environment'
+             % (str(test_instance), str(env)))
diff --git a/src/build/android/pylib/base/test_server.py b/src/build/android/pylib/base/test_server.py
new file mode 100644
index 0000000..763e121
--- /dev/null
+++ b/src/build/android/pylib/base/test_server.py
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class TestServer(object):
+  """Base class for any server that needs to be set up for the tests."""
+
+  def __init__(self, *args, **kwargs):
+    pass
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def Reset(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
diff --git a/src/build/android/pylib/constants/__init__.py b/src/build/android/pylib/constants/__init__.py
new file mode 100644
index 0000000..2d1be26
--- /dev/null
+++ b/src/build/android/pylib/constants/__init__.py
@@ -0,0 +1,288 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+# TODO(jbudorick): Split these constants into coherent modules.
+
+# pylint: disable=W0212
+
+from __future__ import absolute_import
+import collections
+import glob
+import logging
+import os
+import subprocess
+
+import devil.android.sdk.keyevent
+from devil.android.constants import chrome
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+
+
+keyevent = devil.android.sdk.keyevent
+
+
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir, os.pardir)))
+
+PACKAGE_INFO = dict(chrome.PACKAGE_INFO)
+PACKAGE_INFO.update({
+    'legacy_browser':
+    chrome.PackageInfo('com.google.android.browser',
+                       'com.android.browser.BrowserActivity', None, None),
+    'chromecast_shell':
+    chrome.PackageInfo('com.google.android.apps.mediashell',
+                       'com.google.android.apps.mediashell.MediaShellActivity',
+                       'castshell-command-line', None),
+    'android_webview_shell':
+    chrome.PackageInfo('org.chromium.android_webview.shell',
+                       'org.chromium.android_webview.shell.AwShellActivity',
+                       'android-webview-command-line', None),
+    'gtest':
+    chrome.PackageInfo('org.chromium.native_test',
+                       'org.chromium.native_test.NativeUnitTestActivity',
+                       'chrome-native-tests-command-line', None),
+    'android_browsertests':
+    chrome.PackageInfo('org.chromium.android_browsertests_apk',
+                       ('org.chromium.android_browsertests_apk' +
+                        '.ChromeBrowserTestsActivity'),
+                       'chrome-native-tests-command-line', None),
+    'components_browsertests':
+    chrome.PackageInfo('org.chromium.components_browsertests_apk',
+                       ('org.chromium.components_browsertests_apk' +
+                        '.ComponentsBrowserTestsActivity'),
+                       'chrome-native-tests-command-line', None),
+    'content_browsertests':
+    chrome.PackageInfo(
+        'org.chromium.content_browsertests_apk',
+        'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
+        'chrome-native-tests-command-line', None),
+    'chromedriver_webview_shell':
+    chrome.PackageInfo('org.chromium.chromedriver_webview_shell',
+                       'org.chromium.chromedriver_webview_shell.Main', None,
+                       None),
+    'android_webview_cts':
+    chrome.PackageInfo('com.android.webview',
+                       'com.android.cts.webkit.WebViewStartupCtsActivity',
+                       'webview-command-line', None),
+    'android_google_webview_cts':
+    chrome.PackageInfo('com.google.android.webview',
+                       'com.android.cts.webkit.WebViewStartupCtsActivity',
+                       'webview-command-line', None),
+    'android_system_webview_shell':
+    chrome.PackageInfo('org.chromium.webview_shell',
+                       'org.chromium.webview_shell.WebViewBrowserActivity',
+                       'webview-command-line', None),
+    'android_webview_ui_test':
+    chrome.PackageInfo('org.chromium.webview_ui_test',
+                       'org.chromium.webview_ui_test.WebViewUiTestActivity',
+                       'webview-command-line', None),
+    'weblayer_browsertests':
+    chrome.PackageInfo(
+        'org.chromium.weblayer_browsertests_apk',
+        'org.chromium.weblayer_browsertests_apk.WebLayerBrowserTestsActivity',
+        'chrome-native-tests-command-line', None),
+})
+
+
+# Ports arrangement for various test servers used in Chrome for Android.
+# Lighttpd server will attempt to use 9000 as default port, if unavailable it
+# will find a free port from 8001 - 8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
+TEST_POLICY_SERVER_PORT = 9051
+
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_JAVALIB_DIR = 'lib.java'
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
+
+PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = (
+    '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
+
+SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
+
+ANDROID_SDK_BUILD_TOOLS_VERSION = '30.0.1'
+ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'android_sdk',
+                                'public')
+ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
+                                 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
+ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party', 'android_ndk')
+
+BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
+                                os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                                'bad_devices.json')
+
+UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
+
+# TODO(jbudorick): Remove once unused.
+DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+# Configure ubsan to print stack traces in the format understood by "stack" so
+# that they will be symbolized, and disable signal handlers because they
+# interfere with the breakpad and sandbox tests.
+# This value is duplicated in
+# base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java
+UBSAN_OPTIONS = (
+    'print_stacktrace=1 stack_trace_format=\'#%n pc %o %m\' '
+    'handle_segv=0 handle_sigbus=0 handle_sigfpe=0')
+
+# TODO(jbudorick): Rework this into testing/buildbot/
+PYTHON_UNIT_TEST_SUITES = {
+    'pylib_py_unittests': {
+        'path':
+        os.path.join(DIR_SOURCE_ROOT, 'build', 'android'),
+        'test_modules': [
+            'devil.android.device_utils_test',
+            'devil.android.md5sum_test',
+            'devil.utils.cmd_helper_test',
+            'pylib.results.json_results_test',
+            'pylib.utils.proguard_test',
+        ]
+    },
+    'gyp_py_unittests': {
+        'path':
+        os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
+        'test_modules': [
+            'java_cpp_enum_tests',
+            'java_cpp_strings_tests',
+            'java_google_api_keys_tests',
+            'extract_unwind_tables_tests',
+        ]
+    },
+}
+
+LOCAL_MACHINE_TESTS = ['junit', 'python']
+VALID_ENVIRONMENTS = ['local']
+VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey',
+                    'perf', 'python']
+VALID_DEVICE_TYPES = ['Android', 'iOS']
+
+
+def SetBuildType(build_type):
+  """Set the BUILDTYPE environment variable.
+
+  NOTE: Using this function is deprecated, in favor of SetOutputDirectory(),
+        it is still maintained for a few scripts that typically call it
+        to implement their --release and --debug command-line options.
+
+        When writing a new script, consider supporting an --output-dir or
+        --chromium-output-dir option instead, and calling SetOutputDirectory()
+        instead.
+
+  NOTE: If CHROMIUM_OUTPUT_DIR if defined, or if SetOutputDirectory() was
+  called previously, this will be completely ignored.
+  """
+  chromium_output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+  if chromium_output_dir:
+    logging.warning(
+        'SetBuildType("%s") ignored since CHROMIUM_OUTPUT_DIR is already '
+        'defined as (%s)', build_type, chromium_output_dir)
+  os.environ['BUILDTYPE'] = build_type
+
+
+def SetOutputDirectory(output_directory):
+  """Set the Chromium output directory.
+
+  This must be called early by scripts that rely on GetOutDirectory() or
+  CheckOutputDirectory(). Typically by providing an --output-dir or
+  --chromium-output-dir option.
+  """
+  os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory
+
+
+# The message that is printed when the Chromium output directory cannot
+# be found. Note that CHROMIUM_OUT_DIR and BUILDTYPE are not mentioned
+# intentionally to encourage the use of CHROMIUM_OUTPUT_DIR instead.
+_MISSING_OUTPUT_DIR_MESSAGE = '\
+The Chromium output directory could not be found. Please use an option such as \
+--output-directory to provide it (see --help for details). Otherwise, \
+define the CHROMIUM_OUTPUT_DIR environment variable.'
+
+
+def GetOutDirectory():
+  """Returns the Chromium build output directory.
+
+  NOTE: This is determined in the following way:
+    - From a previous call to SetOutputDirectory()
+    - Otherwise, from the CHROMIUM_OUTPUT_DIR env variable, if it is defined.
+    - Otherwise, from the current Chromium source directory, and a previous
+      call to SetBuildType() or the BUILDTYPE env variable, in combination
+      with the optional CHROMIUM_OUT_DIR env variable.
+  """
+  if 'CHROMIUM_OUTPUT_DIR' in os.environ:
+    return os.path.abspath(os.path.join(
+        DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR')))
+
+  build_type = os.environ.get('BUILDTYPE')
+  if not build_type:
+    raise EnvironmentError(_MISSING_OUTPUT_DIR_MESSAGE)
+
+  return os.path.abspath(os.path.join(
+      DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+      build_type))
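+
+# For example, with BUILDTYPE=Debug and neither CHROMIUM_OUTPUT_DIR nor
+# CHROMIUM_OUT_DIR defined, GetOutDirectory() above returns
+# <DIR_SOURCE_ROOT>/out/Debug.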
+
+
+def CheckOutputDirectory():
+  """Checks that the Chromium output directory is set, or can be found.
+
+  If it is not already set, this will also perform a little auto-detection:
+
+    - If the current directory contains a build.ninja file, use it as
+      the output directory.
+
+    - If CHROME_HEADLESS is defined in the environment (e.g. on a bot),
+      check whether there is a single output directory under
+      DIR_SOURCE_ROOT/out/, and if so, use it as the output directory.
+
+  Raises:
+    Exception: If no output directory is detected.
+  """
+  output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+  if output_dir:
+    return
+
+  build_type = os.environ.get('BUILDTYPE')
+  if build_type and len(build_type) > 1:
+    return
+
+  # If CWD is an output directory, then assume it's the desired one.
+  if os.path.exists('build.ninja'):
+    output_dir = os.getcwd()
+    SetOutputDirectory(output_dir)
+    return
+
+  # When running on bots, see if the output directory is obvious.
+  # TODO(http://crbug.com/833808): Get rid of this by ensuring bots always set
+  # CHROMIUM_OUTPUT_DIR correctly.
+  if os.environ.get('CHROME_HEADLESS'):
+    dirs = glob.glob(os.path.join(DIR_SOURCE_ROOT, 'out', '*', 'build.ninja'))
+    if len(dirs) == 1:
+      # The glob above matches the build.ninja files themselves, so use the
+      # containing directory as the output directory.
+      SetOutputDirectory(os.path.dirname(dirs[0]))
+      return
+
+    raise Exception(
+        'Chromium output directory not set, and CHROME_HEADLESS detected. ' +
+        'However, multiple out dirs exist: %r' % dirs)
+
+  raise Exception(_MISSING_OUTPUT_DIR_MESSAGE)
+
+
+# Exit codes
+ERROR_EXIT_CODE = exit_codes.ERROR
+INFRA_EXIT_CODE = exit_codes.INFRA
+WARNING_EXIT_CODE = exit_codes.WARNING
diff --git a/src/build/android/pylib/constants/host_paths.py b/src/build/android/pylib/constants/host_paths.py
new file mode 100644
index 0000000..a38d28e
--- /dev/null
+++ b/src/build/android/pylib/constants/host_paths.py
@@ -0,0 +1,97 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import os
+import sys
+
+from pylib import constants
+
+DIR_SOURCE_ROOT = os.environ.get(
+    'CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir, os.pardir)))
+
+BUILD_COMMON_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common')
+
+# third-party libraries
+ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development',
+    'scripts')
+BUILD_PATH = os.path.join(DIR_SOURCE_ROOT, 'build')
+DEVIL_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil')
+JAVA_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
+                         'bin')
+TRACING_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing')
+
+@contextlib.contextmanager
+def SysPath(path, position=None):
+  """Context manager that temporarily adds |path| to sys.path."""
+  if position is None:
+    sys.path.append(path)
+  else:
+    sys.path.insert(position, path)
+  try:
+    yield
+  finally:
+    if sys.path[-1] == path:
+      sys.path.pop()
+    else:
+      sys.path.remove(path)
+
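+# A minimal usage sketch (mirroring how pylib/gtest/gtest_test_instance.py
+# imports unittest_util):
+#
+#   with SysPath(BUILD_COMMON_PATH):
+#     import unittest_util  # Only importable while the context is active.
+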
+
+# Map of CPU architecture name to (toolchain_name, binprefix) pairs.
+# TODO(digit): Use the build_vars.json file generated by gn.
+_TOOL_ARCH_MAP = {
+  'arm': ('arm-linux-androideabi-4.9', 'arm-linux-androideabi'),
+  'arm64': ('aarch64-linux-android-4.9', 'aarch64-linux-android'),
+  'x86': ('x86-4.9', 'i686-linux-android'),
+  'x86_64': ('x86_64-4.9', 'x86_64-linux-android'),
+  'x64': ('x86_64-4.9', 'x86_64-linux-android'),
+  'mips': ('mipsel-linux-android-4.9', 'mipsel-linux-android'),
+}
+
+# Cache used to speed up the results of ToolPath()
+# Maps (arch, tool_name) pairs to fully qualified program paths.
+# Useful because ToolPath() is called repeatedly for demangling C++ symbols.
+_cached_tool_paths = {}
+
+
+def ToolPath(tool, cpu_arch):
+  """Return a fully qualifed path to an arch-specific toolchain program.
+
+  Args:
+    tool: Unprefixed toolchain program name (e.g. 'objdump')
+    cpu_arch: Target CPU architecture (e.g. 'arm64')
+  Returns:
+    Fully qualified path (e.g. ..../aarch64-linux-android-objdump)
+  Raises:
+    Exception if the toolchain could not be found.
+  """
+  tool_path = _cached_tool_paths.get((tool, cpu_arch))
+  if tool_path:
+    return tool_path
+
+  toolchain_source, toolchain_prefix = _TOOL_ARCH_MAP.get(
+      cpu_arch, (None, None))
+  if not toolchain_source:
+    raise Exception('Could not find tool chain for ' + cpu_arch)
+
+  toolchain_subdir = (
+      'toolchains/%s/prebuilt/linux-x86_64/bin' % toolchain_source)
+
+  tool_path = os.path.join(constants.ANDROID_NDK_ROOT,
+                           toolchain_subdir,
+                           toolchain_prefix + '-' + tool)
+
+  _cached_tool_paths[(tool, cpu_arch)] = tool_path
+  return tool_path
+
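+
+# For example (assuming the NDK layout described by _TOOL_ARCH_MAP):
+#   ToolPath('objdump', 'arm64') ->
+#     <ANDROID_NDK_ROOT>/toolchains/aarch64-linux-android-4.9/prebuilt/
+#     linux-x86_64/bin/aarch64-linux-android-objdump
+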
+
+def GetAaptPath():
+  """Returns the path to the 'aapt' executable."""
+  return os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
diff --git a/src/build/android/pylib/constants/host_paths_unittest.py b/src/build/android/pylib/constants/host_paths_unittest.py
new file mode 100755
index 0000000..72be4ed
--- /dev/null
+++ b/src/build/android/pylib/constants/host_paths_unittest.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import logging
+import os
+import unittest
+
+import six
+import pylib.constants as constants
+import pylib.constants.host_paths as host_paths
+
+# This map corresponds to the binprefix of NDK prebuilt toolchains for various
+# target CPU architectures. Note that 'x86_64' and 'x64' are the same.
+_EXPECTED_NDK_TOOL_SUBDIR_MAP = {
+  'arm': 'toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/' +
+         'arm-linux-androideabi-',
+  'arm64':
+      'toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+      'aarch64-linux-android-',
+  'x86': 'toolchains/x86-4.9/prebuilt/linux-x86_64/bin/i686-linux-android-',
+  'x86_64':
+      'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+  'x64':
+      'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+  'mips':
+      'toolchains/mipsel-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+      'mipsel-linux-android-'
+}
+
+
+class HostPathsTest(unittest.TestCase):
+  def setUp(self):
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def test_GetAaptPath(self):
+    _EXPECTED_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+    self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+    self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+
+  def test_ToolPath(self):
+    for cpu_arch, binprefix in six.iteritems(_EXPECTED_NDK_TOOL_SUBDIR_MAP):
+      expected_binprefix = os.path.join(constants.ANDROID_NDK_ROOT, binprefix)
+      expected_path = expected_binprefix + 'foo'
+      self.assertEqual(host_paths.ToolPath('foo', cpu_arch), expected_path)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/content_settings.py b/src/build/android/pylib/content_settings.py
new file mode 100644
index 0000000..3bf11bc
--- /dev/null
+++ b/src/build/android/pylib/content_settings.py
@@ -0,0 +1,80 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ContentSettings(dict):
+
+  """A dict interface to interact with device content settings.
+
+  Settings are key/value pairs as exposed by 'adb shell content'.
+  """
+
+  def __init__(self, table, device):
+    super(ContentSettings, self).__init__()
+    self._table = table
+    self._device = device
+
+  @staticmethod
+  def _GetTypeBinding(value):
+    if isinstance(value, bool):
+      return 'b'
+    if isinstance(value, float):
+      return 'f'
+    if isinstance(value, int):
+      return 'i'
+    if isinstance(value, long):
+      return 'l'
+    if isinstance(value, str):
+      return 's'
+    raise ValueError('Unsupported type %s' % type(value))
+
+  def iteritems(self):
+    # Example row:
+    # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05'
+    for row in self._device.RunShellCommand(
+        'content query --uri content://%s' % self._table, as_root=True):
+      fields = row.split(', ')
+      key = None
+      value = None
+      for field in fields:
+        k, _, v = field.partition('=')
+        if k == 'name':
+          key = v
+        elif k == 'value':
+          value = v
+      if not key:
+        continue
+      if not value:
+        value = ''
+      yield key, value
+
+  def __getitem__(self, key):
+    return self._device.RunShellCommand(
+        'content query --uri content://%s --where "name=\'%s\'" '
+        '--projection value' % (self._table, key), as_root=True).strip()
+
+  def __setitem__(self, key, value):
+    if key in self:
+      self._device.RunShellCommand(
+          'content update --uri content://%s '
+          '--bind value:%s:%s --where "name=\'%s\'"' % (
+              self._table,
+              self._GetTypeBinding(value), value, key),
+          as_root=True)
+    else:
+      self._device.RunShellCommand(
+          'content insert --uri content://%s '
+          '--bind name:%s:%s --bind value:%s:%s' % (
+              self._table,
+              self._GetTypeBinding(key), key,
+              self._GetTypeBinding(value), value),
+          as_root=True)
+
+  def __delitem__(self, key):
+    self._device.RunShellCommand(
+        'content delete --uri content://%s '
+        '--bind name:%s:%s' % (
+            self._table,
+            self._GetTypeBinding(key), key),
+        as_root=True)
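+
+
+# A minimal usage sketch (|device| is a devil DeviceUtils instance; the
+# table and key names are illustrative):
+#
+#   settings = ContentSettings('settings/secure', device)
+#   settings['mock_location'] = 1
+#   for key, value in settings.iteritems():
+#     print '%s=%s' % (key, value)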
diff --git a/src/build/android/pylib/device/__init__.py b/src/build/android/pylib/device/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/device/__init__.py
diff --git a/src/build/android/pylib/device/commands/BUILD.gn b/src/build/android/pylib/device/commands/BUILD.gn
new file mode 100644
index 0000000..13b69f6
--- /dev/null
+++ b/src/build/android/pylib/device/commands/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+group("commands") {
+  data_deps = [ ":chromium_commands_java" ]
+}
+
+android_library("unzip_java") {
+  jacoco_never_instrument = true
+  sources = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ]
+}
+
+dist_dex("chromium_commands_java") {
+  deps = [ ":unzip_java" ]
+  output = "$root_build_dir/lib.java/chromium_commands.dex.jar"
+  data = [ output ]
+}
diff --git a/src/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/src/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
new file mode 100644
index 0000000..cf0ff67
--- /dev/null
+++ b/src/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
@@ -0,0 +1,93 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.android.commands.unzip;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ *  Minimal implementation of the command-line unzip utility for Android.
+ */
+public class Unzip {
+
+    private static final String TAG = "Unzip";
+
+    public static void main(String[] args) {
+        try {
+            (new Unzip()).run(args);
+        } catch (RuntimeException e) {
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private void showUsage(PrintStream s) {
+        s.println("Usage:");
+        s.println("unzip [zipfile]");
+    }
+
+    @SuppressWarnings("Finally")
+    private void unzip(String[] args) {
+        ZipInputStream zis = null;
+        try {
+            String zipfile = args[0];
+            zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile)));
+            ZipEntry ze = null;
+
+            byte[] bytes = new byte[1024];
+            while ((ze = zis.getNextEntry()) != null) {
+                File outputFile = new File(ze.getName());
+                if (ze.isDirectory()) {
+                    if (!outputFile.exists() && !outputFile.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + outputFile.toString());
+                    }
+                } else {
+                    File parentDir = outputFile.getParentFile();
+                    if (!parentDir.exists() && !parentDir.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + parentDir.toString());
+                    }
+                    OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile));
+                    int actual_bytes = 0;
+                    int total_bytes = 0;
+                    while ((actual_bytes = zis.read(bytes)) != -1) {
+                        out.write(bytes, 0, actual_bytes);
+                        total_bytes += actual_bytes;
+                    }
+                    out.close();
+                }
+                zis.closeEntry();
+            }
+
+        } catch (IOException e) {
+            throw new RuntimeException("Error while unzipping", e);
+        } finally {
+            try {
+                if (zis != null) zis.close();
+            } catch (IOException e) {
+                throw new RuntimeException("Error while closing zip: " + e.toString());
+            }
+        }
+    }
+
+    public void run(String[] args) {
+        if (args.length != 1) {
+            showUsage(System.err);
+            throw new RuntimeException("Incorrect usage!");
+        }
+
+        unzip(args);
+    }
+}
+
diff --git a/src/build/android/pylib/device_settings.py b/src/build/android/pylib/device_settings.py
new file mode 100644
index 0000000..ab4ad1b
--- /dev/null
+++ b/src/build/android/pylib/device_settings.py
@@ -0,0 +1,199 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import content_settings
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+    '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+_COMPATIBLE_BUILD_TYPES = ['userdebug', 'eng']
+
+
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content setings from a list.
+
+  Many settings are documented at:
+    http://developer.android.com/reference/android/provider/Settings.Global.html
+    http://developer.android.com/reference/android/provider/Settings.Secure.html
+    http://developer.android.com/reference/android/provider/Settings.System.html
+
+  Many others are undocumented.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+    desired_settings: A list of (table, [(key, value), ...]) tuples for all
+        settings to configure.
+  """
+  for table, key_value in desired_settings:
+    settings = content_settings.ContentSettings(table, device)
+    for key, value in key_value:
+      settings[key] = value
+    logging.info('\n%s %s', table, (80 - len(table)) * '-')
+    for key, value in sorted(settings.iteritems()):
+      logging.info('\t%s: %s', key, value)
+
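+# A minimal usage sketch (|device| is a devil DeviceUtils instance); the
+# settings lists defined later in this file, e.g.
+# ENABLE_MOCK_LOCATION_SETTINGS, are already in the expected format:
+#
+#   ConfigureContentSettings(device, ENABLE_MOCK_LOCATION_SETTINGS)
+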
+
+def SetLockScreenSettings(device):
+  """Sets lock screen settings on the device.
+
+  On certain device/Android configurations we need to disable the lock screen in
+  a different database. Additionally, the password type must be set to
+  DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+  Lock screen settings are stored in sqlite on the device in:
+      /data/system/locksettings.db
+
+  IMPORTANT: The first column is used as a primary key so that all rows with the
+  same value for that column are removed from the table prior to inserting the
+  new values.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+
+  Raises:
+    Exception if the setting was not properly set.
+  """
+  if device.build_type not in _COMPATIBLE_BUILD_TYPES:
+    logging.warning('Unable to disable lockscreen on %s builds.',
+                    device.build_type)
+    return
+
+  def get_lock_settings(table):
+    return [(table, 'lockscreen.disabled', '1'),
+            (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED),
+            (table, 'lockscreen.password_type_alternate',
+             PASSWORD_QUALITY_UNSPECIFIED)]
+
+  if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH):
+    db = _LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('locksettings')
+    columns = ['name', 'user', 'value']
+    generate_values = lambda k, v: [k, '0', v]
+  elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH):
+    db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('secure') + get_lock_settings('system')
+    columns = ['name', 'value']
+    generate_values = lambda k, v: [k, v]
+  else:
+    logging.warning('Unable to find database file to set lock screen settings.')
+    return
+
+  for table, key, value in locksettings:
+    # Set the lockscreen setting for default user '0'
+    values = generate_values(key, value)
+
+    cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+      'table': table,
+      'primary_key': columns[0],
+      'primary_value': values[0],
+      'columns': ', '.join(columns),
+      'values': ', '.join(["'%s'" % value for value in values])
+    }
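+    # For the default locksettings database this expands to SQL along the
+    # lines of (values illustrative):
+    #   begin transaction;
+    #   delete from 'locksettings' where name='lockscreen.disabled';
+    #   insert into 'locksettings' (name, user, value)
+    #     values ('lockscreen.disabled', '0', '1');
+    #   commit transaction;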
+    output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd),
+                                        as_root=True)
+    if output_msg:
+      logging.info(' '.join(output_msg))
+
+
+ENABLE_LOCATION_SETTINGS = [
+  # Note that setting these in this order is required in order for all of
+  # them to take and stick through a reboot.
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 1),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is enabled and allowed for tests.
+    ('location_providers_allowed', 'gps,network'),
+  ]),
+  ('com.google.settings/partner', [
+    ('network_location_opt_in', 1),
+  ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 0),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is disabled.
+    ('location_providers_allowed', ''),
+  ]),
+]
+
+ENABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 1),
+  ]),
+]
+
+DISABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 0),
+  ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+  ('settings/global', [
+    ('assisted_gps_enabled', 0),
+
+    # Disable "auto time" and "auto time zone" to avoid network-provided time
+    # to overwrite the device's datetime and timezone synchronized from host
+    # when running tests later. See b/6569849.
+    ('auto_time', 0),
+    ('auto_time_zone', 0),
+
+    ('development_settings_enabled', 1),
+
+    # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+    # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+    # will never display the "Report" button.
+    # Type: int ( 0 = disallow, 1 = allow )
+    ('send_action_app_error', 0),
+
+    ('stay_on_while_plugged_in', 3),
+
+    ('verifier_verify_adb_installs', 0),
+  ]),
+  ('settings/secure', [
+    ('allowed_geolocation_origins',
+        'http://www.google.co.uk http://www.google.com'),
+
+    # Ensure that we never get random dialogs like "Unfortunately the process
+    # android.process.acore has stopped", which steal the focus, and make our
+    # automation fail (because the dialog steals the focus then mistakenly
+    # receives the injected user input events).
+    ('anr_show_background', 0),
+
+    ('lockscreen.disabled', 1),
+
+    ('screensaver_enabled', 0),
+
+    ('skip_first_use_hints', 1),
+  ]),
+  ('settings/system', [
+    # Don't want devices to accidentally rotate the screen as that could
+    # affect performance measurements.
+    ('accelerometer_rotation', 0),
+
+    ('lockscreen.disabled', 1),
+
+    # Turn down brightness and disable auto-adjust so that devices run cooler.
+    ('screen_brightness', 5),
+    ('screen_brightness_mode', 0),
+
+    ('user_rotation', 0),
+  ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+  ('settings/global', [
+    ('airplane_mode_on', 1),
+    ('wifi_on', 0),
+  ]),
+]
diff --git a/src/build/android/pylib/dex/__init__.py b/src/build/android/pylib/dex/__init__.py
new file mode 100644
index 0000000..4a12e35
--- /dev/null
+++ b/src/build/android/pylib/dex/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/dex/dex_parser.py b/src/build/android/pylib/dex/dex_parser.py
new file mode 100755
index 0000000..3f2ed6f
--- /dev/null
+++ b/src/build/android/pylib/dex/dex_parser.py
@@ -0,0 +1,549 @@
+#!/usr/bin/env python
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for optimistically parsing dex files.
+
+This file is not meant to provide a generic tool for analyzing dex files.
+A DexFile class that exposes access to several memory items in the dex format
+is provided, but it does not include error handling or validation.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import errno
+import os
+import re
+import struct
+import sys
+import zipfile
+
+# https://source.android.com/devices/tech/dalvik/dex-format#header-item
+_DEX_HEADER_FMT = (
+    ('magic', '8s'),
+    ('checksum', 'I'),
+    ('signature', '20s'),
+    ('file_size', 'I'),
+    ('header_size', 'I'),
+    ('endian_tag', 'I'),
+    ('link_size', 'I'),
+    ('link_off', 'I'),
+    ('map_off', 'I'),
+    ('string_ids_size', 'I'),
+    ('string_ids_off', 'I'),
+    ('type_ids_size', 'I'),
+    ('type_ids_off', 'I'),
+    ('proto_ids_size', 'I'),
+    ('proto_ids_off', 'I'),
+    ('field_ids_size', 'I'),
+    ('field_ids_off', 'I'),
+    ('method_ids_size', 'I'),
+    ('method_ids_off', 'I'),
+    ('class_defs_size', 'I'),
+    ('class_defs_off', 'I'),
+    ('data_size', 'I'),
+    ('data_off', 'I'),
+)
+
+DexHeader = collections.namedtuple('DexHeader',
+                                   ','.join(t[0] for t in _DEX_HEADER_FMT))
+
+# Simple memory items.
+_TypeIdItem = collections.namedtuple('TypeIdItem', 'descriptor_idx')
+_ProtoIdItem = collections.namedtuple(
+    'ProtoIdItem', 'shorty_idx,return_type_idx,parameters_off')
+_MethodIdItem = collections.namedtuple('MethodIdItem',
+                                       'type_idx,proto_idx,name_idx')
+_TypeItem = collections.namedtuple('TypeItem', 'type_idx')
+_StringDataItem = collections.namedtuple('StringItem', 'utf16_size,data')
+_ClassDefItem = collections.namedtuple(
+    'ClassDefItem',
+    'class_idx,access_flags,superclass_idx,interfaces_off,source_file_idx,'
+    'annotations_off,class_data_off,static_values_off')
+
+
+class _MemoryItemList(object):
+  """Base class for repeated memory items."""
+
+  def __init__(self,
+               reader,
+               offset,
+               size,
+               factory,
+               alignment=None,
+               first_item_offset=None):
+    """Creates the item list using the specific item factory.
+
+    Args:
+      reader: _DexReader used for decoding the memory item.
+      offset: Offset from start of the file to the item list, serving as the
+        key for some item types.
+      size: Number of memory items in the list.
+      factory: Function to extract each memory item from a _DexReader.
+      alignment: Optional integer specifying the alignment for the memory
+        section represented by this list.
+      first_item_offset: Optional, specifies a different offset to use for
+        extracting memory items (default is to use offset).
+    """
+    self.offset = offset
+    self.size = size
+    reader.Seek(first_item_offset or offset)
+    self._items = [factory(reader) for _ in xrange(size)]
+
+    if alignment:
+      reader.AlignUpTo(alignment)
+
+  def __iter__(self):
+    return iter(self._items)
+
+  def __getitem__(self, key):
+    return self._items[key]
+
+  def __len__(self):
+    return len(self._items)
+
+  def __repr__(self):
+    item_type_part = ''
+    if self.size != 0:
+      item_type = type(self._items[0])
+      item_type_part = ', item type={}'.format(item_type.__name__)
+
+    return '{}(offset={:#x}, size={}{})'.format(
+        type(self).__name__, self.offset, self.size, item_type_part)
+
+
+class _TypeIdItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    factory = lambda x: _TypeIdItem(x.ReadUInt())
+    super(_TypeIdItemList, self).__init__(reader, offset, size, factory)
+
+
+class _ProtoIdItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    factory = lambda x: _ProtoIdItem(x.ReadUInt(), x.ReadUInt(), x.ReadUInt())
+    super(_ProtoIdItemList, self).__init__(reader, offset, size, factory)
+
+
+class _MethodIdItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    factory = (
+        lambda x: _MethodIdItem(x.ReadUShort(), x.ReadUShort(), x.ReadUInt()))
+    super(_MethodIdItemList, self).__init__(reader, offset, size, factory)
+
+
+class _StringItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    reader.Seek(offset)
+    string_item_offsets = iter([reader.ReadUInt() for _ in xrange(size)])
+
+    def factory(x):
+      data_offset = next(string_item_offsets)
+      string = x.ReadString(data_offset)
+      return _StringDataItem(len(string), string)
+
+    super(_StringItemList, self).__init__(reader, offset, size, factory)
+
+
+class _TypeListItem(_MemoryItemList):
+
+  def __init__(self, reader):
+    offset = reader.Tell()
+    size = reader.ReadUInt()
+    factory = lambda x: _TypeItem(x.ReadUShort())
+    # This is necessary because we need to extract the size of the type list
+    # (in other cases the list size is provided in the header).
+    first_item_offset = reader.Tell()
+    super(_TypeListItem, self).__init__(
+        reader,
+        offset,
+        size,
+        factory,
+        alignment=4,
+        first_item_offset=first_item_offset)
+
+
+class _TypeListItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    super(_TypeListItemList, self).__init__(reader, offset, size, _TypeListItem)
+
+
+class _ClassDefItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    reader.Seek(offset)
+
+    def factory(x):
+      return _ClassDefItem(*(x.ReadUInt()
+                             for _ in xrange(len(_ClassDefItem._fields))))
+
+    super(_ClassDefItemList, self).__init__(reader, offset, size, factory)
+
+
+class _DexMapItem(object):
+
+  def __init__(self, reader):
+    self.type = reader.ReadUShort()
+    reader.ReadUShort()
+    self.size = reader.ReadUInt()
+    self.offset = reader.ReadUInt()
+
+  def __repr__(self):
+    return '_DexMapItem(type={}, size={}, offset={:#x})'.format(
+        self.type, self.size, self.offset)
+
+
+class _DexMapList(object):
+  # Full list of type codes:
+  # https://source.android.com/devices/tech/dalvik/dex-format#type-codes
+  TYPE_TYPE_LIST = 0x1001
+
+  def __init__(self, reader, offset):
+    self._map = {}
+    reader.Seek(offset)
+    self._size = reader.ReadUInt()
+    for _ in xrange(self._size):
+      item = _DexMapItem(reader)
+      self._map[item.type] = item
+
+  def __getitem__(self, key):
+    return self._map[key]
+
+  def __contains__(self, key):
+    return key in self._map
+
+  def __repr__(self):
+    return '_DexMapList(size={}, items={})'.format(self._size, self._map)
+
+
+class _DexReader(object):
+
+  def __init__(self, data):
+    self._data = data
+    self._pos = 0
+
+  def Seek(self, offset):
+    self._pos = offset
+
+  def Tell(self):
+    return self._pos
+
+  def ReadUByte(self):
+    return self._ReadData('<B')
+
+  def ReadUShort(self):
+    return self._ReadData('<H')
+
+  def ReadUInt(self):
+    return self._ReadData('<I')
+
+  def ReadString(self, data_offset):
+    string_length, leb_size = self._ReadULeb128(data_offset)
+    string_data_offset = data_offset + leb_size
+    return self._DecodeMUtf8(string_length, string_data_offset)
+
+  def AlignUpTo(self, align_unit):
+    off_by = self._pos % align_unit
+    if off_by:
+      self.Seek(self._pos + align_unit - off_by)
+
+  def ReadHeader(self):
+    header_fmt = '<' + ''.join(t[1] for t in _DEX_HEADER_FMT)
+    return DexHeader._make(struct.unpack_from(header_fmt, self._data))
+
+  def _ReadData(self, fmt):
+    ret = struct.unpack_from(fmt, self._data, self._pos)[0]
+    self._pos += struct.calcsize(fmt)
+    return ret
+
+  def _ReadULeb128(self, data_offset):
+    """Returns a tuple of (uleb128 value, number of bytes occupied).
+
+    From DWARF3 spec: http://dwarfstd.org/doc/Dwarf3.pdf
+
+    Args:
+      data_offset: Location of the unsigned LEB128.
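+
+    Worked example (from the DWARF spec): the byte sequence 0xe5 0x8e 0x26
+    decodes to 0x65 | (0x0e << 7) | (0x26 << 14) = 624485, occupying three
+    bytes.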
+    """
+    value = 0
+    shift = 0
+    cur_offset = data_offset
+    while True:
+      byte = self._data[cur_offset]
+      cur_offset += 1
+      value |= (byte & 0b01111111) << shift
+      if (byte & 0b10000000) == 0:
+        break
+      shift += 7
+
+    return value, cur_offset - data_offset
+
+  def _DecodeMUtf8(self, string_length, offset):
+    """Returns the string located at the specified offset.
+
+    See https://source.android.com/devices/tech/dalvik/dex-format#mutf-8
+
+    Ported from the Android Java implementation:
+    https://android.googlesource.com/platform/dalvik/+/fe107fb6e3f308ac5174ebdc5a794ee880c741d9/dx/src/com/android/dex/Mutf8.java#34
+
+    Args:
+      string_length: The length of the decoded string.
+      offset: Offset to the beginning of the string.
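+
+    For example, the two-byte MUTF-8 sequence 0xc2 0xa9 decodes to the code
+    point ((0xc2 & 0x1f) << 6) | (0xa9 & 0x3f) = 0xa9, i.e. u'\xa9' (the
+    copyright sign).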
+    """
+    self.Seek(offset)
+    ret = ''
+
+    for _ in xrange(string_length):
+      a = self.ReadUByte()
+      if a == 0:
+        raise _MUTf8DecodeError('Early string termination encountered',
+                                string_length, offset)
+      if (a & 0x80) == 0x00:
+        code = a
+      elif (a & 0xe0) == 0xc0:
+        b = self.ReadUByte()
+        if (b & 0xc0) != 0x80:
+          raise _MUTf8DecodeError('Error in byte 2', string_length, offset)
+        code = ((a & 0x1f) << 6) | (b & 0x3f)
+      elif (a & 0xf0) == 0xe0:
+        b = self.ReadUByte()
+        c = self.ReadUByte()
+        if (b & 0xc0) != 0x80 or (c & 0xc0) != 0x80:
+          raise _MUTf8DecodeError('Error in byte 3 or 4', string_length, offset)
+        code = ((a & 0x0f) << 12) | ((b & 0x3f) << 6) | (c & 0x3f)
+      else:
+        raise _MUTf8DecodeError('Bad byte', string_length, offset)
+
+      ret += unichr(code)
+
+    if self.ReadUByte() != 0x00:
+      raise _MUTf8DecodeError('Expected string termination', string_length,
+                              offset)
+
+    return ret
+
+
+class _MUTf8DecodeError(Exception):
+
+  def __init__(self, message, length, offset):
+    message += ' (decoded string length: {}, string data offset: {:#x})'.format(
+        length, offset)
+    super(_MUTf8DecodeError, self).__init__(message)
+
+
+class DexFile(object):
+  """Represents a single dex file.
+
+  Parses and exposes access to dex file structure and contents, as described
+  at https://source.android.com/devices/tech/dalvik/dex-format
+
+  Fields:
+    reader: _DexReader object used to decode dex file contents.
+    header: DexHeader for this dex file.
+    map_list: _DexMapList object containing list of dex file contents.
+    type_item_list: _TypeIdItemList containing type_id_items.
+    proto_item_list: _ProtoIdItemList containing proto_id_items.
+    method_item_list: _MethodIdItemList containing method_id_items.
+    string_item_list: _StringItemList containing string_data_items that are
+      referenced by index in other sections.
+    type_list_item_list: _TypeListItemList containing _TypeListItems.
+      _TypeListItems are referenced by their offsets from other dex items.
+    class_def_item_list: _ClassDefItemList containing _ClassDefItems.
+  """
+  _CLASS_ACCESS_FLAGS = {
+      0x1: 'public',
+      0x2: 'private',
+      0x4: 'protected',
+      0x8: 'static',
+      0x10: 'final',
+      0x200: 'interface',
+      0x400: 'abstract',
+      0x1000: 'synthetic',
+      0x2000: 'annotation',
+      0x4000: 'enum',
+  }
+
+  def __init__(self, data):
+    """Decodes dex file memory sections.
+
+    Args:
+      data: bytearray containing the contents of a dex file.
+    """
+    self.reader = _DexReader(data)
+    self.header = self.reader.ReadHeader()
+    self.map_list = _DexMapList(self.reader, self.header.map_off)
+    self.type_item_list = _TypeIdItemList(self.reader, self.header.type_ids_off,
+                                          self.header.type_ids_size)
+    self.proto_item_list = _ProtoIdItemList(
+        self.reader, self.header.proto_ids_off, self.header.proto_ids_size)
+    self.method_item_list = _MethodIdItemList(
+        self.reader, self.header.method_ids_off, self.header.method_ids_size)
+    self.string_item_list = _StringItemList(
+        self.reader, self.header.string_ids_off, self.header.string_ids_size)
+    self.class_def_item_list = _ClassDefItemList(
+        self.reader, self.header.class_defs_off, self.header.class_defs_size)
+
+    type_list_key = _DexMapList.TYPE_TYPE_LIST
+    if type_list_key in self.map_list:
+      map_list_item = self.map_list[type_list_key]
+      self.type_list_item_list = _TypeListItemList(
+          self.reader, map_list_item.offset, map_list_item.size)
+    else:
+      self.type_list_item_list = _TypeListItemList(self.reader, 0, 0)
+    self._type_lists_by_offset = {
+        type_list.offset: type_list
+        for type_list in self.type_list_item_list
+    }
+
+  def GetString(self, string_item_idx):
+    string_item = self.string_item_list[string_item_idx]
+    return string_item.data
+
+  def GetTypeString(self, type_item_idx):
+    type_item = self.type_item_list[type_item_idx]
+    return self.GetString(type_item.descriptor_idx)
+
+  def GetTypeListStringsByOffset(self, offset):
+    if not offset:
+      return ()
+    type_list = self._type_lists_by_offset[offset]
+    return tuple(self.GetTypeString(item.type_idx) for item in type_list)
+
+  @staticmethod
+  def ResolveClassAccessFlags(access_flags):
+    return tuple(
+        flag_string
+        for flag, flag_string in DexFile._CLASS_ACCESS_FLAGS.iteritems()
+        if flag & access_flags)
+
+  def IterMethodSignatureParts(self):
+    """Yields the string components of dex methods in a dex file.
+
+    Yields:
+      Tuples that look like:
+        (class name, return type, method name, (parameter type, ...)).
+    """
+    for method_item in self.method_item_list:
+      class_name_string = self.GetTypeString(method_item.type_idx)
+      method_name_string = self.GetString(method_item.name_idx)
+      proto_item = self.proto_item_list[method_item.proto_idx]
+      return_type_string = self.GetTypeString(proto_item.return_type_idx)
+      parameter_types = self.GetTypeListStringsByOffset(
+          proto_item.parameters_off)
+      yield (class_name_string, return_type_string, method_name_string,
+             parameter_types)
+
+  def __repr__(self):
+    items = [
+        self.header,
+        self.map_list,
+        self.type_item_list,
+        self.proto_item_list,
+        self.method_item_list,
+        self.string_item_list,
+        self.type_list_item_list,
+        self.class_def_item_list,
+    ]
+    return '\n'.join(str(item) for item in items)
+
+
+class _DumpCommand(object):
+
+  def __init__(self, dexfile):
+    self._dexfile = dexfile
+
+  def Run(self):
+    raise NotImplementedError()
+
+
+class _DumpMethods(_DumpCommand):
+
+  def Run(self):
+    for parts in self._dexfile.IterMethodSignatureParts():
+      class_type, return_type, method_name, parameter_types = parts
+      print('{} {} (return type={}, parameters={})'.format(
+          class_type, method_name, return_type, parameter_types))
+
+
+class _DumpStrings(_DumpCommand):
+
+  def Run(self):
+    for string_item in self._dexfile.string_item_list:
+      # Some strings are likely to be non-ascii (vs. methods/classes).
+      print(string_item.data.encode('utf-8'))
+
+
+class _DumpClasses(_DumpCommand):
+
+  def Run(self):
+    for class_item in self._dexfile.class_def_item_list:
+      class_string = self._dexfile.GetTypeString(class_item.class_idx)
+      superclass_string = self._dexfile.GetTypeString(class_item.superclass_idx)
+      interfaces = self._dexfile.GetTypeListStringsByOffset(
+          class_item.interfaces_off)
+      access_flags = DexFile.ResolveClassAccessFlags(class_item.access_flags)
+      print('{} (superclass={}, interfaces={}, access_flags={})'.format(
+          class_string, superclass_string, interfaces, access_flags))
+
+
+class _DumpSummary(_DumpCommand):
+
+  def Run(self):
+    print(self._dexfile)
+
+
+def _DumpDexItems(dexfile_data, name, item):
+  dexfile = DexFile(bytearray(dexfile_data))
+  print('dex_parser: Dumping {} for {}'.format(item, name))
+  cmds = {
+      'summary': _DumpSummary,
+      'methods': _DumpMethods,
+      'strings': _DumpStrings,
+      'classes': _DumpClasses,
+  }
+  try:
+    cmds[item](dexfile).Run()
+  except IOError as e:
+    if e.errno == errno.EPIPE:
+      # Assume we're piping to "less", do nothing.
+      pass
+
+
+def main():
+  parser = argparse.ArgumentParser(description='Dump dex contents to stdout.')
+  parser.add_argument(
+      'input', help='Input (.dex, .jar, .zip, .aab, .apk) file path.')
+  parser.add_argument(
+      'item',
+      choices=('methods', 'strings', 'classes', 'summary'),
+      help='Item to dump',
+      nargs='?',
+      default='summary')
+  args = parser.parse_args()
+
+  if os.path.splitext(args.input)[1] in ('.apk', '.jar', '.zip', '.aab'):
+    with zipfile.ZipFile(args.input) as z:
+      dex_file_paths = [
+          f for f in z.namelist() if re.match(r'.*classes[0-9]*\.dex$', f)
+      ]
+      if not dex_file_paths:
+        print('Error: {} does not contain any classes.dex files'.format(
+            args.input))
+        sys.exit(1)
+
+      for path in dex_file_paths:
+        _DumpDexItems(z.read(path), path, args.item)
+
+  else:
+    with open(args.input, 'rb') as f:
+      _DumpDexItems(f.read(), args.input, args.item)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/pylib/gtest/__init__.py b/src/build/android/pylib/gtest/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/pylib/gtest/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/gtest/filter/base_unittests_disabled b/src/build/android/pylib/gtest/filter/base_unittests_disabled
new file mode 100644
index 0000000..533d3e1
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/base_unittests_disabled
@@ -0,0 +1,25 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+# TODO(jrg): Fails on bots.  Works locally.  Figure out why.  2/6/12
+FieldTrialTest.*
+# Flaky?
+ScopedJavaRefTest.RefCounts
+FileTest.MemoryCorruption
+MessagePumpLibeventTest.QuitOutsideOfRun
+ScopedFD.ScopedFDCrashesOnCloseFailure
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/src/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/src/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000..6bec7d0
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/src/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/src/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 0000000..cefc64f
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/src/build/android/pylib/gtest/filter/content_browsertests_disabled b/src/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 0000000..9c89121
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,45 @@
+# List of suppressions
+# Timeouts
+DatabaseTest.*
+
+# Crashes
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.*
+BrowserPluginHostTest.*
+BrowserPluginTest.*
+PluginTest.*
+
+# http://crbug.com/463740
+CrossPlatformAccessibilityBrowserTest.SelectedEditableTextAccessibility
+
+# http://crbug.com/297230
+RenderAccessibilityImplTest.DetachAccessibilityObject
+
+# http://crbug.com/187500
+RenderViewImplTest.ImeComposition
+RenderViewImplTest.InsertCharacters
+RenderViewImplTest.OnHandleKeyboardEvent
+RenderViewImplTest.OnNavStateChanged
+# ZoomLevel is not used on Android
+RenderFrameImplTest.ZoomLimit
+RendererAccessibilityTest.SendFullAccessibilityTreeOnReload
+RendererAccessibilityTest.HideAccessibilityObject
+RendererAccessibilityTest.ShowAccessibilityObject
+RendererAccessibilityTest.TextSelectionShouldSendRoot
+
+# http://crbug.com/386227
+IndexedDBBrowserTest.VersionChangeCrashResilience
+
+# http://crbug.com/233118
+IndexedDBBrowserTest.NullKeyPathPersistence
+
+# http://crbug.com/338421
+GinBrowserTest.GinAndGarbageCollection
+
+# http://crbug.com/343604
+MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0
+
+# http://crbug.com/1039450
+ProprietaryCodec/WebRtcMediaRecorderTest.*
diff --git a/src/build/android/pylib/gtest/filter/unit_tests_disabled b/src/build/android/pylib/gtest/filter/unit_tests_disabled
new file mode 100644
index 0000000..97811c8
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -0,0 +1,74 @@
+# List of suppressions
+
+# The UDP related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+PageInfoTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/256259
+DiagnosticsModelTest.RunAll
+
+# Death tests are not supported with apks.
+*DeathTest*
diff --git a/src/build/android/pylib/gtest/gtest_config.py b/src/build/android/pylib/gtest/gtest_config.py
new file mode 100644
index 0000000..3ac1955
--- /dev/null
+++ b/src/build/android/pylib/gtest/gtest_config.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for android gtest suites."""
+
+# Add new suites here before upgrading them to the stable list below.
+EXPERIMENTAL_TEST_SUITES = [
+    'components_browsertests',
+    'heap_profiler_unittests',
+    'devtools_bridge_tests',
+]
+
+TELEMETRY_EXPERIMENTAL_TEST_SUITES = [
+    'telemetry_unittests',
+]
+
+# Do not modify this list without approval of an android owner.
+# This list determines which suites are run by default, both for local
+# testing and on android trybots running on commit-queue.
+STABLE_TEST_SUITES = [
+    'android_webview_unittests',
+    'base_unittests',
+    'blink_unittests',
+    'breakpad_unittests',
+    'cc_unittests',
+    'components_unittests',
+    'content_browsertests',
+    'content_unittests',
+    'events_unittests',
+    'gl_tests',
+    'gl_unittests',
+    'gpu_unittests',
+    'ipc_tests',
+    'media_unittests',
+    'midi_unittests',
+    'net_unittests',
+    'sandbox_linux_unittests',
+    'skia_unittests',
+    'sql_unittests',
+    'storage_unittests',
+    'ui_android_unittests',
+    'ui_base_unittests',
+    'ui_touch_selection_unittests',
+    'unit_tests_apk',
+]
+
+# Tests fail in component=shared_library build, which is required for ASan.
+# http://crbug.com/344868
+ASAN_EXCLUDED_TEST_SUITES = [
+    'breakpad_unittests',
+    'sandbox_linux_unittests',
+
+    # The internal ASAN recipe cannot run step "unit_tests_apk", this is the
+    # only internal recipe affected. See http://crbug.com/607850
+    'unit_tests_apk',
+]
diff --git a/src/build/android/pylib/gtest/gtest_test_instance.py b/src/build/android/pylib/gtest/gtest_test_instance.py
new file mode 100644
index 0000000..a88c365
--- /dev/null
+++ b/src/build/android/pylib/gtest/gtest_test_instance.py
@@ -0,0 +1,610 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import HTMLParser
+import json
+import logging
+import os
+import re
+import tempfile
+import threading
+import xml.etree.ElementTree
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.constants import host_paths
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.symbols import stack_symbolizer
+from pylib.utils import test_filter
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import unittest_util # pylint: disable=import-error
+
+
+BROWSER_TEST_SUITES = [
+    'android_browsertests',
+    'android_sync_integration_tests',
+    'components_browsertests',
+    'content_browsertests',
+    'weblayer_browsertests',
+]
+
+# The maximum number of shards to use during a test run.
+MAX_SHARDS = 256
+
+RUN_IN_SUB_THREAD_TEST_SUITES = [
+    # Multiprocess tests should be run outside of the main thread.
+    'base_unittests',  # file_locking_unittest.cc uses a child process.
+    'gwp_asan_unittests',
+    'ipc_perftests',
+    'ipc_tests',
+    'mojo_perftests',
+    'mojo_unittests',
+    'net_unittests'
+]
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+_DEPS_EXCLUSION_LIST = [
+    'chrome/test/data/extensions/api_test',
+    'chrome/test/data/extensions/secure_shell',
+    'chrome/test/data/firefox*',
+    'chrome/test/data/gpu',
+    'chrome/test/data/image_decoding',
+    'chrome/test/data/import',
+    'chrome/test/data/page_cycler',
+    'chrome/test/data/perf',
+    'chrome/test/data/pyauto_private',
+    'chrome/test/data/safari_import',
+    'chrome/test/data/scroll',
+    'chrome/test/data/third_party',
+    'third_party/hunspell_dictionaries/*.dic',
+    # crbug.com/258690
+    'webkit/data/bmp_decoder',
+    'webkit/data/ico_decoder',
+]
+
+
+_EXTRA_NATIVE_TEST_ACTIVITY = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'NativeTestActivity')
+_EXTRA_RUN_IN_SUB_THREAD = (
+    'org.chromium.native_test.NativeTest.RunInSubThread')
+EXTRA_SHARD_NANO_TIMEOUT = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'ShardNanoTimeout')
+_EXTRA_SHARD_SIZE_LIMIT = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'ShardSizeLimit')
+
+# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate
+# results.
+_RE_TEST_STATUS = re.compile(
+    # Test state.
+    r'\[ +((?:RUN)|(?:FAILED)|(?:OK)|(?:CRASHED)|(?:SKIPPED)) +\] ?'
+    # Test name.
+    r'([^ ]+)?'
+    # Optional parameters.
+    r'(?:, where'
+    #   Type parameter
+    r'(?: TypeParam = [^()]*(?: and)?)?'
+    #   Value parameter
+    r'(?: GetParam\(\) = [^()]*)?'
+    # End of optional parameters.
+    ')?'
+    # Optional test execution time.
+    r'(?: \((\d+) ms\))?$')
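+# Example lines matched by _RE_TEST_STATUS (test names are illustrative):
+#   [ RUN      ] FooTest.Bar
+#   [       OK ] FooTest.Bar (5 ms)
+#   [  FAILED  ] FooTest.Bar, where GetParam() = 4 (25 ms)
+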
+# Crash detection constants.
+_RE_TEST_ERROR = re.compile(r'FAILURES!!! Tests run: \d+,'
+                            r' Failures: \d+, Errors: 1')
+_RE_TEST_CURRENTLY_RUNNING = re.compile(
+    r'\[ERROR:.*?\] Currently running: (.*)')
+_RE_TEST_DCHECK_FATAL = re.compile(r'\[.*:FATAL:.*\] (.*)')
+_RE_DISABLED = re.compile(r'DISABLED_')
+_RE_FLAKY = re.compile(r'FLAKY_')
+
+# Detect stack line in stdout.
+_STACK_LINE_RE = re.compile(r'\s*#\d+')
+
+def ParseGTestListTests(raw_list):
+  """Parses a raw test list as provided by --gtest_list_tests.
+
+  Args:
+    raw_list: The raw test listing with the following format:
+
+    IPCChannelTest.
+      SendMessageInChannelConnected
+    IPCSyncChannelTest.
+      Simple
+      DISABLED_SendWithTimeoutMixedOKAndTimeout
+
+  Returns:
+    A list of all tests. For the above raw listing:
+
+    [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
+     IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
+  """
+  ret = []
+  current = ''
+  for test in raw_list:
+    if not test:
+      continue
+    if not test.startswith(' '):
+      test_case = test.split()[0]
+      if test_case.endswith('.'):
+        current = test_case
+    else:
+      test = test.strip()
+      if test and 'YOU HAVE' not in test:
+        test_name = test.split()[0]
+        ret += [current + test_name]
+  return ret
+
+
+def ParseGTestOutput(output, symbolizer, device_abi):
+  """Parses raw gtest output and returns a list of results.
+
+  Args:
+    output: A list of output lines.
+    symbolizer: The symbolizer used to symbolize stack.
+    device_abi: Device abi that is needed for symbolization.
+  Returns:
+    A list of base_test_result.BaseTestResults.
+  """
+  duration = 0
+  fallback_result_type = None
+  log = []
+  stack = []
+  result_type = None
+  results = []
+  test_name = None
+
+  def symbolize_stack_and_merge_with_log():
+    log_string = '\n'.join(log or [])
+    if not stack:
+      stack_string = ''
+    else:
+      stack_string = '\n'.join(
+          symbolizer.ExtractAndResolveNativeStackTraces(
+              stack, device_abi))
+    return '%s\n%s' % (log_string, stack_string)
+
+  def handle_possibly_unknown_test():
+    if test_name is not None:
+      results.append(
+          base_test_result.BaseTestResult(
+              TestNameWithoutDisabledPrefix(test_name),
+              # If we get here, that means we started a test, but it did not
+              # produce a definitive test status output, so assume it crashed.
+              # crbug/1191716
+              fallback_result_type or base_test_result.ResultType.CRASH,
+              duration,
+              log=symbolize_stack_and_merge_with_log()))
+
+  for l in output:
+    matcher = _RE_TEST_STATUS.match(l)
+    if matcher:
+      if matcher.group(1) == 'RUN':
+        handle_possibly_unknown_test()
+        duration = 0
+        fallback_result_type = None
+        log = []
+        stack = []
+        result_type = None
+      elif matcher.group(1) == 'OK':
+        result_type = base_test_result.ResultType.PASS
+      elif matcher.group(1) == 'SKIPPED':
+        result_type = base_test_result.ResultType.SKIP
+      elif matcher.group(1) == 'FAILED':
+        result_type = base_test_result.ResultType.FAIL
+      elif matcher.group(1) == 'CRASHED':
+        fallback_result_type = base_test_result.ResultType.CRASH
+      # Note that the test name and its status might not appear on the same
+      # line.
+      test_name = matcher.group(2) if matcher.group(2) else test_name
+      duration = int(matcher.group(3)) if matcher.group(3) else 0
+
+    else:
+      # Can possibly add more matchers, such as different results from DCHECK.
+      currently_running_matcher = _RE_TEST_CURRENTLY_RUNNING.match(l)
+      dcheck_matcher = _RE_TEST_DCHECK_FATAL.match(l)
+
+      if currently_running_matcher:
+        test_name = currently_running_matcher.group(1)
+        result_type = base_test_result.ResultType.CRASH
+        duration = None  # Don't know. Not using 0 as this is unknown vs 0.
+      elif dcheck_matcher:
+        result_type = base_test_result.ResultType.CRASH
+        duration = None  # Don't know. Not using 0 as this is unknown vs 0.
+
+    if log is not None:
+      if not matcher and _STACK_LINE_RE.match(l):
+        stack.append(l)
+      else:
+        log.append(l)
+
+    if result_type and test_name:
+      # Don't bother symbolizing output if the test passed.
+      if result_type == base_test_result.ResultType.PASS:
+        stack = []
+      results.append(base_test_result.BaseTestResult(
+          TestNameWithoutDisabledPrefix(test_name), result_type, duration,
+          log=symbolize_stack_and_merge_with_log()))
+      test_name = None
+
+  handle_possibly_unknown_test()
+
+  return results
+
+
+def ParseGTestXML(xml_content):
+  """Parse gtest XML result."""
+  results = []
+  if not xml_content:
+    return results
+
+  html = HTMLParser.HTMLParser()
+
+  testsuites = xml.etree.ElementTree.fromstring(xml_content)
+  for testsuite in testsuites:
+    suite_name = testsuite.attrib['name']
+    for testcase in testsuite:
+      case_name = testcase.attrib['name']
+      result_type = base_test_result.ResultType.PASS
+      log = []
+      for failure in testcase:
+        result_type = base_test_result.ResultType.FAIL
+        log.append(html.unescape(failure.attrib['message']))
+
+      results.append(base_test_result.BaseTestResult(
+          '%s.%s' % (suite_name, TestNameWithoutDisabledPrefix(case_name)),
+          result_type,
+          int(float(testcase.attrib['time']) * 1000),
+          log=('\n'.join(log) if log else '')))
+
+  return results
+
+
+def ParseGTestJSON(json_content):
+  """Parse results in the JSON Test Results format."""
+  results = []
+  if not json_content:
+    return results
+
+  json_data = json.loads(json_content)
+
+  openstack = json_data['tests'].items()
+
+  while openstack:
+    name, value = openstack.pop()
+
+    if 'expected' in value and 'actual' in value:
+      result_type = base_test_result.ResultType.PASS if value[
+          'actual'] == 'PASS' else base_test_result.ResultType.FAIL
+      results.append(base_test_result.BaseTestResult(name, result_type))
+    else:
+      openstack += [("%s.%s" % (name, k), v) for k, v in value.iteritems()]
+
+  return results
+
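+# Illustrative sketch of the traversal above (names are made up): the nested
+# trie {'tests': {'A': {'B': {'testFoo': {'expected': 'PASS', 'actual':
+# 'PASS'}}}}} is flattened depth-first into a single PASS result named
+# 'A.B.testFoo'.
+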
+
+def TestNameWithoutDisabledPrefix(test_name):
+  """Modify the test name without disabled prefix if prefix 'DISABLED_' or
+  'FLAKY_' presents.
+
+  Args:
+    test_name: The name of a test.
+  Returns:
+    A test name without prefix 'DISABLED_' or 'FLAKY_'.
+  """
+  disabled_prefixes = [_RE_DISABLED, _RE_FLAKY]
+  for dp in disabled_prefixes:
+    test_name = dp.sub('', test_name)
+  return test_name
+
+
+class GtestTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, data_deps_delegate, error_func):
+    super(GtestTestInstance, self).__init__()
+    # TODO(jbudorick): Support multiple test suites.
+    if len(args.suite_name) > 1:
+      raise ValueError('Platform mode currently supports only 1 gtest suite')
+    self._coverage_dir = args.coverage_dir
+    self._exe_dist_dir = None
+    self._external_shard_index = args.test_launcher_shard_index
+    self._extract_test_list_from_filter = args.extract_test_list_from_filter
+    self._filter_tests_lock = threading.Lock()
+    self._gs_test_artifacts_bucket = args.gs_test_artifacts_bucket
+    self._isolated_script_test_output = args.isolated_script_test_output
+    self._isolated_script_test_perf_output = (
+        args.isolated_script_test_perf_output)
+    self._render_test_output_dir = args.render_test_output_dir
+    self._shard_timeout = args.shard_timeout
+    self._store_tombstones = args.store_tombstones
+    self._suite = args.suite_name[0]
+    self._symbolizer = stack_symbolizer.Symbolizer(None)
+    self._total_external_shards = args.test_launcher_total_shards
+    self._wait_for_java_debugger = args.wait_for_java_debugger
+
+    # GYP:
+    if args.executable_dist_dir:
+      self._exe_dist_dir = os.path.abspath(args.executable_dist_dir)
+    else:
+      # TODO(agrieve): Remove auto-detection once recipes pass flag explicitly.
+      exe_dist_dir = os.path.join(constants.GetOutDirectory(),
+                                  '%s__dist' % self._suite)
+
+      if os.path.exists(exe_dist_dir):
+        self._exe_dist_dir = exe_dist_dir
+
+    incremental_part = ''
+    if args.test_apk_incremental_install_json:
+      incremental_part = '_incremental'
+
+    self._test_launcher_batch_limit = MAX_SHARDS
+    if (args.test_launcher_batch_limit
+        and 0 < args.test_launcher_batch_limit < MAX_SHARDS):
+      self._test_launcher_batch_limit = args.test_launcher_batch_limit
+
+    apk_path = os.path.join(
+        constants.GetOutDirectory(), '%s_apk' % self._suite,
+        '%s-debug%s.apk' % (self._suite, incremental_part))
+    self._test_apk_incremental_install_json = (
+        args.test_apk_incremental_install_json)
+    if not os.path.exists(apk_path):
+      self._apk_helper = None
+    else:
+      self._apk_helper = apk_helper.ApkHelper(apk_path)
+      self._extras = {
+          _EXTRA_NATIVE_TEST_ACTIVITY: self._apk_helper.GetActivityName(),
+      }
+      if self._suite in RUN_IN_SUB_THREAD_TEST_SUITES:
+        self._extras[_EXTRA_RUN_IN_SUB_THREAD] = 1
+      if self._suite in BROWSER_TEST_SUITES:
+        self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1
+        self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e9 * self._shard_timeout)
+        self._shard_timeout = 10 * self._shard_timeout
+      if args.wait_for_java_debugger:
+        self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e15)  # Forever
+
+    if not self._apk_helper and not self._exe_dist_dir:
+      error_func('Could not find apk or executable for %s' % self._suite)
+
+    self._data_deps = []
+    self._gtest_filter = test_filter.InitializeFilterFromArgs(args)
+    self._run_disabled = args.run_disabled
+
+    self._data_deps_delegate = data_deps_delegate
+    self._runtime_deps_path = args.runtime_deps_path
+    if not self._runtime_deps_path:
+      logging.warning('No data dependencies will be pushed.')
+
+    if args.app_data_files:
+      self._app_data_files = args.app_data_files
+      if args.app_data_file_dir:
+        self._app_data_file_dir = args.app_data_file_dir
+      else:
+        self._app_data_file_dir = tempfile.mkdtemp()
+        logging.critical('Saving app files to %s', self._app_data_file_dir)
+    else:
+      self._app_data_files = None
+      self._app_data_file_dir = None
+
+    self._flags = None
+    self._initializeCommandLineFlags(args)
+
+    # TODO(jbudorick): Remove this once it's deployed.
+    self._enable_xml_result_parsing = args.enable_xml_result_parsing
+
+  def _initializeCommandLineFlags(self, args):
+    self._flags = []
+    if args.command_line_flags:
+      self._flags.extend(args.command_line_flags)
+    if args.device_flags_file:
+      with open(args.device_flags_file) as f:
+        stripped_lines = (l.strip() for l in f)
+        self._flags.extend(flag for flag in stripped_lines if flag)
+    if args.run_disabled:
+      self._flags.append('--gtest_also_run_disabled_tests')
+
+  @property
+  def activity(self):
+    return self._apk_helper and self._apk_helper.GetActivityName()
+
+  @property
+  def apk(self):
+    return self._apk_helper and self._apk_helper.path
+
+  @property
+  def apk_helper(self):
+    return self._apk_helper
+
+  @property
+  def app_file_dir(self):
+    return self._app_data_file_dir
+
+  @property
+  def app_files(self):
+    return self._app_data_files
+
+  @property
+  def coverage_dir(self):
+    return self._coverage_dir
+
+  @property
+  def enable_xml_result_parsing(self):
+    return self._enable_xml_result_parsing
+
+  @property
+  def exe_dist_dir(self):
+    return self._exe_dist_dir
+
+  @property
+  def external_shard_index(self):
+    return self._external_shard_index
+
+  @property
+  def extract_test_list_from_filter(self):
+    return self._extract_test_list_from_filter
+
+  @property
+  def extras(self):
+    return self._extras
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def gs_test_artifacts_bucket(self):
+    return self._gs_test_artifacts_bucket
+
+  @property
+  def gtest_filter(self):
+    return self._gtest_filter
+
+  @property
+  def isolated_script_test_output(self):
+    return self._isolated_script_test_output
+
+  @property
+  def isolated_script_test_perf_output(self):
+    return self._isolated_script_test_perf_output
+
+  @property
+  def render_test_output_dir(self):
+    return self._render_test_output_dir
+
+  @property
+  def package(self):
+    return self._apk_helper and self._apk_helper.GetPackageName()
+
+  @property
+  def permissions(self):
+    return self._apk_helper and self._apk_helper.GetPermissions()
+
+  @property
+  def runner(self):
+    return self._apk_helper and self._apk_helper.GetInstrumentationName()
+
+  @property
+  def shard_timeout(self):
+    return self._shard_timeout
+
+  @property
+  def store_tombstones(self):
+    return self._store_tombstones
+
+  @property
+  def suite(self):
+    return self._suite
+
+  @property
+  def symbolizer(self):
+    return self._symbolizer
+
+  @property
+  def test_apk_incremental_install_json(self):
+    return self._test_apk_incremental_install_json
+
+  @property
+  def test_launcher_batch_limit(self):
+    return self._test_launcher_batch_limit
+
+  @property
+  def total_external_shards(self):
+    return self._total_external_shards
+
+  @property
+  def wait_for_java_debugger(self):
+    return self._wait_for_java_debugger
+
+  #override
+  def TestType(self):
+    return 'gtest'
+
+  #override
+  def GetPreferredAbis(self):
+    if not self._apk_helper:
+      return None
+    return self._apk_helper.GetAbis()
+
+  #override
+  def SetUp(self):
+    """Map data dependencies via isolate."""
+    self._data_deps.extend(
+        self._data_deps_delegate(self._runtime_deps_path))
+
+  def GetDataDependencies(self):
+    """Returns the test suite's data dependencies.
+
+    Returns:
+      A list of (host_path, device_path) tuples to push. If device_path is
+      None, the client is responsible for determining where to push the file.
+    """
+    return self._data_deps
+
+  def FilterTests(self, test_list, disabled_prefixes=None):
+    """Filters |test_list| based on prefixes and, if present, a filter string.
+
+    Args:
+      test_list: The list of tests to filter.
+      disabled_prefixes: A list of test prefixes to filter. Defaults to
+        DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_.
+    Returns:
+      A filtered list of tests to run.
+    """
+    gtest_filter_strings = [
+        self._GenerateDisabledFilterString(disabled_prefixes)]
+    if self._gtest_filter:
+      gtest_filter_strings.append(self._gtest_filter)
+
+    filtered_test_list = test_list
+    # This lock is required because |unittest_util.FilterTestNames|'s use of
+    # |fnmatch| is not threadsafe on older versions of Python.
+    with self._filter_tests_lock:
+      for gtest_filter_string in gtest_filter_strings:
+        logging.debug('Filtering tests using: %s', gtest_filter_string)
+        filtered_test_list = unittest_util.FilterTestNames(
+            filtered_test_list, gtest_filter_string)
+
+      if self._run_disabled and self._gtest_filter:
+        out_filtered_test_list = list(set(test_list)-set(filtered_test_list))
+        for test in out_filtered_test_list:
+          test_name_no_disabled = TestNameWithoutDisabledPrefix(test)
+          if test_name_no_disabled != test and unittest_util.FilterTestNames(
+              [test_name_no_disabled], self._gtest_filter):
+            filtered_test_list.append(test)
+    return filtered_test_list
+
+  def _GenerateDisabledFilterString(self, disabled_prefixes):
+    disabled_filter_items = []
+
+    if disabled_prefixes is None:
+      disabled_prefixes = ['FAILS_', 'PRE_']
+      if '--run-manual' not in self._flags:
+        disabled_prefixes += ['MANUAL_']
+      if not self._run_disabled:
+        disabled_prefixes += ['DISABLED_', 'FLAKY_']
+
+    disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes]
+    disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes]
+
+    disabled_tests_file_path = os.path.join(
+        host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest',
+        'filter', '%s_disabled' % self._suite)
+    if os.path.exists(disabled_tests_file_path):
+      with open(disabled_tests_file_path) as disabled_tests_file:
+        disabled_filter_items += [
+            l for l in (line.strip() for line in disabled_tests_file)
+            if l and not l.startswith('#')]
+
+    return '*-%s' % ':'.join(disabled_filter_items)
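+
+  # A sketch of what _GenerateDisabledFilterString returns with the default
+  # prefixes (illustrative; the exact string depends on flags and on the
+  # per-suite disabled-tests file):
+  #   '*-FAILS_*:PRE_*:MANUAL_*:DISABLED_*:FLAKY_*:*.FAILS_*:*.PRE_*:...'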
+
+  #override
+  def TearDown(self):
+    """Do nothing."""
+    pass
diff --git a/src/build/android/pylib/gtest/gtest_test_instance_test.py b/src/build/android/pylib/gtest/gtest_test_instance_test.py
new file mode 100755
index 0000000..1429e3d
--- /dev/null
+++ b/src/build/android/pylib/gtest/gtest_test_instance_test.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+
+
+class GtestTestInstanceTests(unittest.TestCase):
+
+  def testParseGTestListTests_simple(self):
+    raw_output = [
+      'TestCaseOne.',
+      '  testOne',
+      '  testTwo',
+      'TestCaseTwo.',
+      '  testThree',
+      '  testFour',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TestCaseOne.testOne',
+      'TestCaseOne.testTwo',
+      'TestCaseTwo.testThree',
+      'TestCaseTwo.testFour',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_old(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_new(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.  # TypeParam = TypeParam0',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_old(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0',
+      '  testWithValueParam/1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_new(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0  # GetParam() = 0',
+      '  testWithValueParam/1  # GetParam() = 1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_emptyTestName(self):
+    raw_output = [
+      'TestCase.',
+      '  ',
+      '  nonEmptyTestName',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TestCase.nonEmptyTestName',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestOutput_pass(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[       OK ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+  def testParseGTestOutput_fail(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[   FAILED ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_crash(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[  CRASHED ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_errorCrash(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[ERROR:blah] Currently running: FooTest.Bar',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertIsNone(actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_fatalDcheck(self):
+    raw_output = [
+        '[ RUN      ] FooTest.Bar',
+        '[0324/183029.116334:FATAL:test_timeouts.cc(103)] Check failed: !init',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertIsNone(actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_unknown(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(0, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_nonterminalUnknown(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[ RUN      ] FooTest.Baz',
+      '[       OK ] FooTest.Baz (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(2, len(actual))
+
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(0, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+    self.assertEquals('FooTest.Baz', actual[1].GetName())
+    self.assertEquals(1, actual[1].GetDuration())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[1].GetType())
+
+  def testParseGTestOutput_deathTestCrashOk(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[ CRASHED      ]',
+      '[       OK ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+  def testParseGTestOutput_typeParameterized(self):
+    raw_output = [
+        '[ RUN      ] Baz/FooTest.Bar/0',
+        '[   FAILED ] Baz/FooTest.Bar/0, where TypeParam =  (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_valueParameterized(self):
+    raw_output = [
+        '[ RUN      ] Baz/FooTest.Bar/0',
+        '[   FAILED ] Baz/FooTest.Bar/0,' +
+        ' where GetParam() = 4-byte object <00-00 00-00> (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_typeAndValueParameterized(self):
+    raw_output = [
+        '[ RUN      ] Baz/FooTest.Bar/0',
+        '[   FAILED ] Baz/FooTest.Bar/0,' +
+        ' where TypeParam =  and GetParam() =  (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_skippedTest(self):
+    raw_output = [
+        '[ RUN      ] FooTest.Bar',
+        '[  SKIPPED ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.SKIP, actual[0].GetType())
+
+  def testParseGTestXML_none(self):
+    actual = gtest_test_instance.ParseGTestXML(None)
+    self.assertEquals([], actual)
+
+  def testParseGTestJSON_none(self):
+    actual = gtest_test_instance.ParseGTestJSON(None)
+    self.assertEquals([], actual)
+
+  def testParseGTestJSON_example(self):
+    raw_json = """
+      {
+        "tests": {
+          "mojom_tests": {
+            "parse": {
+              "ast_unittest": {
+                "ASTTest": {
+                  "testNodeBase": {
+                    "expected": "PASS",
+                    "actual": "PASS",
+                    "artifacts": {
+                      "screenshot": ["screenshots/page.png"]
+                    }
+                  }
+                }
+              }
+            }
+          }
+        },
+        "interrupted": false,
+        "path_delimiter": ".",
+        "version": 3,
+        "seconds_since_epoch": 1406662283.764424,
+        "num_failures_by_type": {
+          "FAIL": 0,
+          "PASS": 1
+        },
+        "artifact_types": {
+          "screenshot": "image/png"
+        }
+      }"""
+    actual = gtest_test_instance.ParseGTestJSON(raw_json)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase',
+                      actual[0].GetName())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+  def testTestNameWithoutDisabledPrefix_disabled(self):
+    test_name_list = [
+      'A.DISABLED_B',
+      'DISABLED_A.B',
+      'DISABLED_A.DISABLED_B',
+    ]
+    for test_name in test_name_list:
+      actual = gtest_test_instance \
+          .TestNameWithoutDisabledPrefix(test_name)
+      expected = 'A.B'
+      self.assertEquals(expected, actual)
+
+  def testTestNameWithoutDisabledPrefix_flaky(self):
+    test_name_list = [
+      'A.FLAKY_B',
+      'FLAKY_A.B',
+      'FLAKY_A.FLAKY_B',
+    ]
+    for test_name in test_name_list:
+      actual = gtest_test_instance \
+          .TestNameWithoutDisabledPrefix(test_name)
+      expected = 'A.B'
+      self.assertEquals(expected, actual)
+
+  def testTestNameWithoutDisabledPrefix_notDisabledOrFlaky(self):
+    test_name = 'A.B'
+    actual = gtest_test_instance \
+        .TestNameWithoutDisabledPrefix(test_name)
+    expected = 'A.B'
+    self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/instrumentation/__init__.py b/src/build/android/pylib/instrumentation/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/instrumentation/instrumentation_parser.py b/src/build/android/pylib/instrumentation/instrumentation_parser.py
new file mode 100644
index 0000000..dd9f9cc
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -0,0 +1,111 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+
+# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
+STATUS_CODE_START = 1
+STATUS_CODE_OK = 0
+STATUS_CODE_ERROR = -1
+STATUS_CODE_FAILURE = -2
+
+# AndroidJUnitRunner outputs status code -3 to indicate that a test was
+# skipped.
+STATUS_CODE_SKIP = -3
+
+# AndroidJUnitRunner outputs status code -4 to indicate a failed assumption:
+# "A test for which an assumption fails should not generate a test case
+# failure."
+# http://junit.org/junit4/javadoc/4.12/org/junit/AssumptionViolatedException.html
+STATUS_CODE_ASSUMPTION_FAILURE = -4
+
+STATUS_CODE_TEST_DURATION = 1337
+
+# When a test batch fails due to post-test assertion failures (e.g.
+# LifetimeAssert).
+STATUS_CODE_BATCH_FAILURE = 1338
+
+# http://developer.android.com/reference/android/app/Activity.html
+RESULT_CODE_OK = -1
+RESULT_CODE_CANCELED = 0
+
+_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
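+# e.g. _INSTR_LINE_RE.match('INSTRUMENTATION_STATUS: test=testFoo').groups()
+# evaluates to ('STATUS', 'test=testFoo') (the key and value are illustrative).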
+
+
+class InstrumentationParser(object):
+
+  def __init__(self, stream):
+    """An incremental parser for the output of Android instrumentation tests.
+
+    Example:
+
+      stream = adb.IterShell('am instrument -r ...')
+      parser = InstrumentationParser(stream)
+
+      for code, bundle in parser.IterStatus():
+        # do something with each instrumentation status
+        print('status:', code, bundle)
+
+      # do something with the final instrumentation result
+      code, bundle = parser.GetResult()
+      print('result:', code, bundle)
+
+    Args:
+      stream: a sequence of lines as produced by the raw output of an
+        instrumentation test (e.g. by |am instrument -r|).
+    """
+    self._stream = stream
+    self._code = None
+    self._bundle = None
+
+  def IterStatus(self):
+    """Iterate over statuses as they are produced by the instrumentation test.
+
+    Yields:
+      A tuple (code, bundle) for each instrumentation status found in the
+      output.
+    """
+    def join_bundle_values(bundle):
+      for key in bundle:
+        bundle[key] = '\n'.join(bundle[key])
+      return bundle
+
+    bundle = {'STATUS': {}, 'RESULT': {}}
+    header = None
+    key = None
+    for line in self._stream:
+      m = _INSTR_LINE_RE.match(line)
+      if m:
+        header, value = m.groups()
+        key = None
+        if header in ['STATUS', 'RESULT'] and '=' in value:
+          key, value = value.split('=', 1)
+          bundle[header][key] = [value]
+        elif header == 'STATUS_CODE':
+          yield int(value), join_bundle_values(bundle['STATUS'])
+          bundle['STATUS'] = {}
+        elif header == 'CODE':
+          self._code = int(value)
+        else:
+          logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
+      elif key is not None:
+        bundle[header][key].append(line)
+
+    self._bundle = join_bundle_values(bundle['RESULT'])
+
+  def GetResult(self):
+    """Return the final instrumentation result.
+
+    Returns:
+      A pair (code, bundle) with the final instrumentation result. The |code|
+      may be None if no instrumentation result was found in the output.
+
+    Raises:
+      AssertionError if attempting to get the instrumentation result before
+      exhausting |IterStatus|.
+    """
+    assert self._bundle is not None, (
+        'The IterStatus generator must be exhausted before reading the final'
+        ' instrumentation result.')
+    return self._code, self._bundle
diff --git a/src/build/android/pylib/instrumentation/instrumentation_parser_test.py b/src/build/android/pylib/instrumentation/instrumentation_parser_test.py
new file mode 100755
index 0000000..d664455
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_parser_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.InstrumentationParser."""
+
+import unittest
+
+from pylib.instrumentation import instrumentation_parser
+
+
+class InstrumentationParserTest(unittest.TestCase):
+
+  def testInstrumentationParser_nothing(self):
+    parser = instrumentation_parser.InstrumentationParser([''])
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_noMatchingStarts(self):
+    raw_output = [
+      '',
+      'this.is.a.test.package.TestClass:.',
+      'Test result for =.',
+      'Time: 1.234',
+      '',
+      'OK (1 test)',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_resultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_RESULT: shortMsg=foo bar',
+      'INSTRUMENTATION_RESULT: longMsg=a foo',
+      'walked into',
+      'a bar',
+      'INSTRUMENTATION_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(-1, code)
+    self.assertEqual(
+        {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_oneStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: foo=1',
+      'INSTRUMENTATION_STATUS: bar=hello',
+      'INSTRUMENTATION_STATUS: world=false',
+      'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
+      'INSTRUMENTATION_STATUS: test=testMethod',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (0, {
+        'foo': '1',
+        'bar': 'hello',
+        'world': 'false',
+        'class': 'this.is.a.test.package.TestClass',
+        'test': 'testMethod',
+      })
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_multiStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_STATUS: test_skipped=true',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+      'INSTRUMENTATION_STATUS: class=hello',
+      'INSTRUMENTATION_STATUS: test=world',
+      'INSTRUMENTATION_STATUS: stack=',
+      'foo/bar.py (27)',
+      'hello/world.py (42)',
+      'test/file.py (1)',
+      'INSTRUMENTATION_STATUS_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (1, {'class': 'foo', 'test': 'bar'}),
+      (0, {'test_skipped': 'true'}),
+      (-1, {
+        'class': 'hello',
+        'test': 'world',
+        'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
+      }),
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_statusResultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_RESULT: result=hello',
+      'world',
+      '',
+      '',
+      'INSTRUMENTATION_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+
+    self.assertEqual(0, code)
+    self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
+    self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/instrumentation/instrumentation_test_instance.py b/src/build/android/pylib/instrumentation/instrumentation_test_instance.py
new file mode 100644
index 0000000..5493c36
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -0,0 +1,1039 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import logging
+import os
+import pickle
+import re
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_exception
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.instrumentation import test_result
+from pylib.instrumentation import instrumentation_parser
+from pylib.symbols import deobfuscator
+from pylib.symbols import stack_symbolizer
+from pylib.utils import dexdump
+from pylib.utils import gold_utils
+from pylib.utils import instrumentation_tracing
+from pylib.utils import proguard
+from pylib.utils import shared_preference_utils
+from pylib.utils import test_filter
+
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import unittest_util # pylint: disable=import-error
+
+# Ref: http://developer.android.com/reference/android/app/Activity.html
+_ACTIVITY_RESULT_CANCELED = 0
+_ACTIVITY_RESULT_OK = -1
+
+_COMMAND_LINE_PARAMETER = 'cmdlinearg-parameter'
+_DEFAULT_ANNOTATIONS = [
+    'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest', 'IntegrationTest']
+_EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS = [
+    'DisabledTest', 'FlakyTest', 'Manual']
+_VALID_ANNOTATIONS = set(_DEFAULT_ANNOTATIONS +
+                         _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS)
+
+_TEST_LIST_JUNIT4_RUNNERS = [
+    'org.chromium.base.test.BaseChromiumAndroidJUnitRunner']
+
+_SKIP_PARAMETERIZATION = 'SkipCommandLineParameterization'
+_PARAMETERIZED_COMMAND_LINE_FLAGS = 'ParameterizedCommandLineFlags'
+_PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES = (
+    'ParameterizedCommandLineFlags$Switches')
+_NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE)
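+# _NATIVE_CRASH_RE matches phrases like 'process crash' or 'Native crash'
+# anywhere in a line (examples are illustrative).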
+_PICKLE_FORMAT_VERSION = 12
+
+# The ID of the bundle value Instrumentation uses to report which test index the
+# results are for in a collection of tests. Note that this index is 1-based.
+_BUNDLE_CURRENT_ID = 'current'
+# The ID of the bundle value Instrumentation uses to report the test class.
+_BUNDLE_CLASS_ID = 'class'
+# The ID of the bundle value Instrumentation uses to report the test name.
+_BUNDLE_TEST_ID = 'test'
+# The ID of the bundle value Instrumentation uses to report if a test was
+# skipped.
+_BUNDLE_SKIPPED_ID = 'test_skipped'
+# The ID of the bundle value Instrumentation uses to report the crash stack, if
+# the test crashed.
+_BUNDLE_STACK_ID = 'stack'
+
+# The ID of the bundle value Chrome uses to report the test duration.
+_BUNDLE_DURATION_ID = 'duration_ms'
+
+class MissingSizeAnnotationError(test_exception.TestException):
+  def __init__(self, class_name):
+    super(MissingSizeAnnotationError, self).__init__(class_name +
+        ': Test method is missing required size annotation. Add one of: ' +
+        ', '.join('@' + a for a in _VALID_ANNOTATIONS))
+
+
+class CommandLineParameterizationException(test_exception.TestException):
+
+  def __init__(self, msg):
+    super(CommandLineParameterizationException, self).__init__(msg)
+
+
+class TestListPickleException(test_exception.TestException):
+  pass
+
+
+# TODO(jbudorick): Make these private class methods of
+# InstrumentationTestInstance once the instrumentation junit3_runner_class is
+# deprecated.
+def ParseAmInstrumentRawOutput(raw_output):
+  """Parses the output of an |am instrument -r| call.
+
+  Args:
+    raw_output: the output of an |am instrument -r| call as a list of lines
+  Returns:
+    A 3-tuple containing:
+      - the instrumentation code as an integer
+      - the instrumentation result as a list of lines
+      - the instrumentation statuses received as a list of 2-tuples
+        containing:
+        - the status code as an integer
+        - the bundle dump as a dict mapping string keys to string values
+          (multi-line values are joined with newlines).
+  """
+  parser = instrumentation_parser.InstrumentationParser(raw_output)
+  statuses = list(parser.IterStatus())
+  code, bundle = parser.GetResult()
+  return (code, bundle, statuses)
+
+
+def GenerateTestResults(result_code, result_bundle, statuses, duration_ms,
+                        device_abi, symbolizer):
+  """Generate test results from |statuses|.
+
+  Args:
+    result_code: The overall status code as an integer.
+    result_bundle: The summary bundle dump as a dict.
+    statuses: A list of 2-tuples containing:
+      - the status code as an integer
+      - the bundle dump as a dict mapping string keys to string values
+      Note that this is the same as the third item in the 3-tuple returned by
+      |ParseAmInstrumentRawOutput|.
+    duration_ms: The duration of the test in milliseconds.
+    device_abi: The device_abi, which is needed for symbolization.
+    symbolizer: The symbolizer used to symbolize stack.
+
+  Returns:
+    A list containing an instance of InstrumentationTestResult for each test
+    parsed.
+  """
+
+  results = []
+
+  current_result = None
+  cumulative_duration = 0
+
+  for status_code, bundle in statuses:
+    # If the last test was a failure already, don't override that failure with
+    # post-test failures that could be caused by the original failure.
+    if (status_code == instrumentation_parser.STATUS_CODE_BATCH_FAILURE
+        and current_result
+        and current_result.GetType() != base_test_result.ResultType.FAIL):
+      current_result.SetType(base_test_result.ResultType.FAIL)
+      _MaybeSetLog(bundle, current_result, symbolizer, device_abi)
+      continue
+
+    if status_code == instrumentation_parser.STATUS_CODE_TEST_DURATION:
+      # For the first result, duration will be set below to the difference
+      # between the reported and actual durations to account for overhead like
+      # starting instrumentation.
+      if results:
+        current_duration = int(bundle.get(_BUNDLE_DURATION_ID, duration_ms))
+        current_result.SetDuration(current_duration)
+        cumulative_duration += current_duration
+      continue
+
+    test_class = bundle.get(_BUNDLE_CLASS_ID, '')
+    test_method = bundle.get(_BUNDLE_TEST_ID, '')
+    if test_class and test_method:
+      test_name = '%s#%s' % (test_class, test_method)
+    else:
+      continue
+
+    if status_code == instrumentation_parser.STATUS_CODE_START:
+      if current_result:
+        results.append(current_result)
+      current_result = test_result.InstrumentationTestResult(
+          test_name, base_test_result.ResultType.UNKNOWN, duration_ms)
+    else:
+      if status_code == instrumentation_parser.STATUS_CODE_OK:
+        if bundle.get(_BUNDLE_SKIPPED_ID, '').lower() in ('true', '1', 'yes'):
+          current_result.SetType(base_test_result.ResultType.SKIP)
+        elif current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+          current_result.SetType(base_test_result.ResultType.PASS)
+      elif status_code == instrumentation_parser.STATUS_CODE_SKIP:
+        current_result.SetType(base_test_result.ResultType.SKIP)
+      elif status_code == instrumentation_parser.STATUS_CODE_ASSUMPTION_FAILURE:
+        current_result.SetType(base_test_result.ResultType.SKIP)
+      else:
+        if status_code not in (instrumentation_parser.STATUS_CODE_ERROR,
+                               instrumentation_parser.STATUS_CODE_FAILURE):
+          logging.error('Unrecognized status code %d. Handling as an error.',
+                        status_code)
+        current_result.SetType(base_test_result.ResultType.FAIL)
+    _MaybeSetLog(bundle, current_result, symbolizer, device_abi)
+
+  if current_result:
+    if current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+      crashed = (result_code == _ACTIVITY_RESULT_CANCELED
+                 and any(_NATIVE_CRASH_RE.search(l)
+                         for l in result_bundle.itervalues()))
+      if crashed:
+        current_result.SetType(base_test_result.ResultType.CRASH)
+
+    results.append(current_result)
+
+  if results:
+    logging.info('Adding cumulative overhead to test %s: %dms',
+                 results[0].GetName(), duration_ms - cumulative_duration)
+    results[0].SetDuration(duration_ms - cumulative_duration)
+
+  return results
+
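+# A typical end-to-end flow (sketch; assumes |raw_output| holds the lines
+# produced by an |am instrument -r| invocation):
+#
+#   code, bundle, statuses = ParseAmInstrumentRawOutput(raw_output)
+#   results = GenerateTestResults(code, bundle, statuses, duration_ms=0,
+#                                 device_abi=None, symbolizer=None)
+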
+
+def _MaybeSetLog(bundle, current_result, symbolizer, device_abi):
+  if _BUNDLE_STACK_ID in bundle:
+    if symbolizer and device_abi:
+      current_result.SetLog('%s\n%s' % (bundle[_BUNDLE_STACK_ID], '\n'.join(
+          symbolizer.ExtractAndResolveNativeStackTraces(
+              bundle[_BUNDLE_STACK_ID], device_abi))))
+    else:
+      current_result.SetLog(bundle[_BUNDLE_STACK_ID])
+
+
+def FilterTests(tests, filter_str=None, annotations=None,
+                excluded_annotations=None):
+  """Filter a list of tests
+
+  Args:
+    tests: a list of tests. e.g. [
+           {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'},
+           {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}]
+    filter_str: googletest-style filter string.
+    annotations: a dict of wanted annotations for test methods.
+    exclude_annotations: a dict of annotations to exclude.
+
+  Return:
+    A list of filtered tests
+  """
+  def gtest_filter(t):
+    if not filter_str:
+      return True
+    # Allow fully-qualified name as well as an omitted package.
+    unqualified_class_test = {
+      'class': t['class'].split('.')[-1],
+      'method': t['method']
+    }
+    names = [
+      GetTestName(t, sep='.'),
+      GetTestName(unqualified_class_test, sep='.'),
+      GetUniqueTestName(t, sep='.')
+    ]
+
+    if t['is_junit4']:
+      names += [
+          GetTestNameWithoutParameterPostfix(t, sep='.'),
+          GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.')
+      ]
+
+    pattern_groups = filter_str.split('-')
+    if len(pattern_groups) > 1:
+      negative_filter = pattern_groups[1]
+      if unittest_util.FilterTestNames(names, negative_filter):
+        return []
+
+    positive_filter = pattern_groups[0]
+    return unittest_util.FilterTestNames(names, positive_filter)
+
+  def annotation_filter(all_annotations):
+    if not annotations:
+      return True
+    return any_annotation_matches(annotations, all_annotations)
+
+  def excluded_annotation_filter(all_annotations):
+    if not excluded_annotations:
+      return True
+    return not any_annotation_matches(excluded_annotations,
+                                      all_annotations)
+
+  def any_annotation_matches(filter_annotations, all_annotations):
+    return any(
+        ak in all_annotations
+        and annotation_value_matches(av, all_annotations[ak])
+        for ak, av in filter_annotations)
+
+  def annotation_value_matches(filter_av, av):
+    if filter_av is None:
+      return True
+    elif isinstance(av, dict):
+      tav_from_dict = av['value']
+      # If tav_from_dict is an int, the 'in' operator breaks, so convert
+      # filter_av and manually compare. See https://crbug.com/1019707
+      if isinstance(tav_from_dict, int):
+        return int(filter_av) == tav_from_dict
+      else:
+        return filter_av in tav_from_dict
+    elif isinstance(av, list):
+      return filter_av in av
+    return filter_av == av
+
+  filtered_tests = []
+  for t in tests:
+    # Gtest filtering
+    if not gtest_filter(t):
+      continue
+
+    # Enforce that all tests declare their size.
+    if not any(a in _VALID_ANNOTATIONS for a in t['annotations']):
+      raise MissingSizeAnnotationError(GetTestName(t))
+
+    if (not annotation_filter(t['annotations'])
+        or not excluded_annotation_filter(t['annotations'])):
+      continue
+
+    filtered_tests.append(t)
+
+  return filtered_tests
+
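+# Example invocation (sketch; the test dict below is made up):
+#
+#   tests = [{'annotations': {'SmallTest': None},
+#             'class': 'org.chromium.FooTest', 'method': 'testBar',
+#             'is_junit4': True}]
+#   FilterTests(tests, filter_str='FooTest.*',
+#               annotations=[('SmallTest', None)])
+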
+
+# TODO(yolandyan): remove this once the tests are converted to junit4
+def GetAllTestsFromJar(test_jar):
+  pickle_path = '%s-proguard.pickle' % test_jar
+  try:
+    tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_jar))
+  except TestListPickleException as e:
+    logging.info('Could not get tests from pickle: %s', e)
+    logging.info('Getting tests from JAR via proguard.')
+    tests = _GetTestsFromProguard(test_jar)
+    SaveTestsToPickle(pickle_path, tests)
+  return tests
+
+
+def GetAllTestsFromApk(test_apk):
+  pickle_path = '%s-dexdump.pickle' % test_apk
+  try:
+    tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_apk))
+  except TestListPickleException as e:
+    logging.info('Could not get tests from pickle: %s', e)
+    logging.info('Getting tests from dex via dexdump.')
+    tests = _GetTestsFromDexdump(test_apk)
+    SaveTestsToPickle(pickle_path, tests)
+  return tests
+
+def GetTestsFromPickle(pickle_path, test_mtime):
+  if not os.path.exists(pickle_path):
+    raise TestListPickleException('%s does not exist.' % pickle_path)
+  if os.path.getmtime(pickle_path) <= test_mtime:
+    raise TestListPickleException('File is stale: %s' % pickle_path)
+
+  with open(pickle_path, 'r') as f:
+    pickle_data = pickle.load(f)
+  if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
+    raise TestListPickleException('PICKLE_FORMAT_VERSION has changed.')
+  return pickle_data['TEST_METHODS']
+
+
+# TODO(yolandyan): remove this once the test listing from java runner lands
+@instrumentation_tracing.no_tracing
+def _GetTestsFromProguard(jar_path):
+  p = proguard.Dump(jar_path)
+  class_lookup = dict((c['class'], c) for c in p['classes'])
+
+  def is_test_class(c):
+    return c['class'].endswith('Test')
+
+  def is_test_method(m):
+    return m['method'].startswith('test')
+
+  def recursive_class_annotations(c):
+    s = c['superclass']
+    if s in class_lookup:
+      a = recursive_class_annotations(class_lookup[s])
+    else:
+      a = {}
+    a.update(c['annotations'])
+    return a
+
+  def stripped_test_class(c):
+    return {
+      'class': c['class'],
+      'annotations': recursive_class_annotations(c),
+      'methods': [m for m in c['methods'] if is_test_method(m)],
+      'superclass': c['superclass'],
+    }
+
+  return [stripped_test_class(c) for c in p['classes']
+          if is_test_class(c)]
+
+
+def _GetTestsFromDexdump(test_apk):
+  dex_dumps = dexdump.Dump(test_apk)
+  tests = []
+
+  def get_test_methods(methods):
+    return [
+        {
+          'method': m,
+          # No annotation info is available from dexdump.
+          # Set the MediumTest annotation by default.
+          'annotations': {'MediumTest': None},
+        } for m in methods if m.startswith('test')]
+
+  for dump in dex_dumps:
+    for package_name, package_info in dump.iteritems():
+      for class_name, class_info in package_info['classes'].iteritems():
+        if class_name.endswith('Test'):
+          tests.append({
+              'class': '%s.%s' % (package_name, class_name),
+              'annotations': {},
+              'methods': get_test_methods(class_info['methods']),
+              'superclass': class_info['superclass'],
+          })
+  return tests
+
+def SaveTestsToPickle(pickle_path, tests):
+  pickle_data = {
+    'VERSION': _PICKLE_FORMAT_VERSION,
+    'TEST_METHODS': tests,
+  }
+  with open(pickle_path, 'w') as pickle_file:
+    pickle.dump(pickle_data, pickle_file)
+
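+# Round-trip sketch (the path is illustrative): after
+# SaveTestsToPickle('foo.pickle', tests), GetTestsFromPickle('foo.pickle',
+# test_mtime) returns |tests| again, provided the pickle file is newer than
+# |test_mtime| and _PICKLE_FORMAT_VERSION has not changed.
+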
+
+class MissingJUnit4RunnerException(test_exception.TestException):
+  """Raised when JUnit4 runner is not provided or specified in apk manifest"""
+
+  def __init__(self):
+    super(MissingJUnit4RunnerException, self).__init__(
+        'JUnit4 runner is not provided or specified in the test APK manifest.')
+
+
+def GetTestName(test, sep='#'):
+  """Gets the name of the given test.
+
+  Note that this may return the same name for more than one test, e.g. if a
+  test is being run multiple times with different parameters.
+
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+  Returns:
+    The test name as a string.
+  """
+  test_name = '%s%s%s' % (test['class'], sep, test['method'])
+  assert not set(' *-:') & set(test_name), (
+      'The test name must not contain any of the characters in " *-:". See '
+      'https://crbug.com/912199')
+  return test_name
+
+
+def GetTestNameWithoutParameterPostfix(
+      test, sep='#', parameterization_sep='__'):
+  """Gets the name of the given JUnit4 test without parameter postfix.
+
+  For most WebView JUnit4 javatests, each test is parameterized with
+  "__sandboxed_mode" to run in both non-sandboxed mode and sandboxed mode.
+
+  This function returns the name of the test without parameterization
+  so test filters can match both parameterized and non-parameterized tests.
+
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+    parameterization_sep: the character(s) that separate the method name and
+                          the method parameterization postfix.
+  Returns:
+    The test name without parameter postfix as a string.
+  """
+  name = GetTestName(test, sep=sep)
+  return name.split(parameterization_sep)[0]
+
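+# e.g. (illustrative): 'org.chromium.FooTest#testBar__sandboxed_mode' maps to
+# 'org.chromium.FooTest#testBar'.
+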
+
+def GetUniqueTestName(test, sep='#'):
+  """Gets the unique name of the given test.
+
+  This will include text to disambiguate between tests for which GetTestName
+  would return the same name.
+
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+  Returns:
+    The unique test name as a string.
+  """
+  display_name = GetTestName(test, sep=sep)
+  if test.get('flags', [None])[0]:
+    sanitized_flags = [x.replace('-', '_') for x in test['flags']]
+    display_name = '%s_with_%s' % (display_name, '_'.join(sanitized_flags))
+
+  assert not set(' *-:') & set(display_name), (
+      'The test name must not contain any of the characters in " *-:". See '
+      'https://crbug.com/912199')
+
+  return display_name
+
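+# e.g. (illustrative): with test['flags'] == ['--foo-bar'], the unique name for
+# 'org.chromium.FooTest#testBaz' becomes
+# 'org.chromium.FooTest#testBaz_with___foo_bar'.
+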
+
+class InstrumentationTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, data_deps_delegate, error_func):
+    super(InstrumentationTestInstance, self).__init__()
+
+    self._additional_apks = []
+    self._apk_under_test = None
+    self._apk_under_test_incremental_install_json = None
+    self._modules = None
+    self._fake_modules = None
+    self._additional_locales = None
+    self._package_info = None
+    self._suite = None
+    self._test_apk = None
+    self._test_apk_incremental_install_json = None
+    self._test_jar = None
+    self._test_package = None
+    self._junit3_runner_class = None
+    self._junit4_runner_class = None
+    self._junit4_runner_supports_listing = None
+    self._test_support_apk = None
+    self._initializeApkAttributes(args, error_func)
+
+    self._data_deps = None
+    self._data_deps_delegate = None
+    self._runtime_deps_path = None
+    self._initializeDataDependencyAttributes(args, data_deps_delegate)
+
+    self._annotations = None
+    self._excluded_annotations = None
+    self._test_filter = None
+    self._initializeTestFilterAttributes(args)
+
+    self._flags = None
+    self._use_apk_under_test_flags_file = False
+    self._initializeFlagAttributes(args)
+
+    self._screenshot_dir = None
+    self._timeout_scale = None
+    self._wait_for_java_debugger = None
+    self._initializeTestControlAttributes(args)
+
+    self._coverage_directory = None
+    self._jacoco_coverage_type = None
+    self._initializeTestCoverageAttributes(args)
+
+    self._store_tombstones = False
+    self._symbolizer = None
+    self._enable_java_deobfuscation = False
+    self._deobfuscator = None
+    self._initializeLogAttributes(args)
+
+    self._edit_shared_prefs = []
+    self._initializeEditPrefsAttributes(args)
+
+    self._replace_system_package = None
+    self._initializeReplaceSystemPackageAttributes(args)
+
+    self._system_packages_to_remove = None
+    self._initializeSystemPackagesToRemoveAttributes(args)
+
+    self._use_webview_provider = None
+    self._initializeUseWebviewProviderAttributes(args)
+
+    self._skia_gold_properties = None
+    self._initializeSkiaGoldAttributes(args)
+
+    self._wpr_enable_record = args.wpr_enable_record
+
+    self._external_shard_index = args.test_launcher_shard_index
+    self._total_external_shards = args.test_launcher_total_shards
+
+  def _initializeApkAttributes(self, args, error_func):
+    if args.apk_under_test:
+      apk_under_test_path = args.apk_under_test
+      if (not args.apk_under_test.endswith('.apk')
+          and not args.apk_under_test.endswith('.apks')):
+        apk_under_test_path = os.path.join(
+            constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+            '%s.apk' % args.apk_under_test)
+
+      # TODO(jbudorick): Move the realpath up to the argument parser once
+      # APK-by-name is no longer supported.
+      apk_under_test_path = os.path.realpath(apk_under_test_path)
+
+      if not os.path.exists(apk_under_test_path):
+        error_func('Unable to find APK under test: %s' % apk_under_test_path)
+
+      self._apk_under_test = apk_helper.ToHelper(apk_under_test_path)
+
+    test_apk_path = args.test_apk
+    if not os.path.exists(test_apk_path):
+      test_apk_path = os.path.join(
+          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+          '%s.apk' % args.test_apk)
+      # TODO(jbudorick): Move the realpath up to the argument parser once
+      # APK-by-name is no longer supported.
+      test_apk_path = os.path.realpath(test_apk_path)
+
+    if not os.path.exists(test_apk_path):
+      error_func('Unable to find test APK: %s' % test_apk_path)
+
+    self._test_apk = apk_helper.ToHelper(test_apk_path)
+    self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
+
+    self._apk_under_test_incremental_install_json = (
+        args.apk_under_test_incremental_install_json)
+    self._test_apk_incremental_install_json = (
+        args.test_apk_incremental_install_json)
+
+    if self._test_apk_incremental_install_json:
+      assert self._suite.endswith('_incremental')
+      self._suite = self._suite[:-len('_incremental')]
+
+    self._modules = args.modules
+    self._fake_modules = args.fake_modules
+    self._additional_locales = args.additional_locales
+
+    self._test_jar = args.test_jar
+    self._test_support_apk = apk_helper.ToHelper(os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
+        '%sSupport.apk' % self._suite))
+
+    if not self._test_jar:
+      logging.warning('Test jar not specified. Test runner will not have '
+                      'Java annotation info available. May not handle test '
+                      'timeouts correctly.')
+    elif not os.path.exists(self._test_jar):
+      error_func('Unable to find test JAR: %s' % self._test_jar)
+
+    self._test_package = self._test_apk.GetPackageName()
+    all_instrumentations = self._test_apk.GetAllInstrumentations()
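+    # aapt encodes a boolean-true meta-data value as 0xffffffff, so an
+    # instrumentation whose 'chromium-junit3' metadata carries that value is
+    # a JUnit3 runner; all others are treated as JUnit4 runners.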
+    all_junit3_runner_classes = [
+        x for x in all_instrumentations if ('0xffffffff' in x.get(
+            'chromium-junit3', ''))]
+    all_junit4_runner_classes = [
+        x for x in all_instrumentations if ('0xffffffff' not in x.get(
+            'chromium-junit3', ''))]
+
+    if len(all_junit3_runner_classes) > 1:
+      logging.warning('This test apk has more than one JUnit3 instrumentation')
+    if len(all_junit4_runner_classes) > 1:
+      logging.warning('This test apk has more than one JUnit4 instrumentation')
+
+    self._junit3_runner_class = (
+      all_junit3_runner_classes[0]['android:name']
+      if all_junit3_runner_classes else self.test_apk.GetInstrumentationName())
+
+    self._junit4_runner_class = (
+      all_junit4_runner_classes[0]['android:name']
+      if all_junit4_runner_classes else None)
+
+    if self._junit4_runner_class:
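+      # For incremental installs the manifest's instrumentation entry may be
+      # a wrapper, so look for 'real-instr' metadata naming a runner that is
+      # known to support test listing.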
+      if self._test_apk_incremental_install_json:
+        self._junit4_runner_supports_listing = next(
+            (True for x in self._test_apk.GetAllMetadata()
+             if 'real-instr' in x[0] and x[1] in _TEST_LIST_JUNIT4_RUNNERS),
+            False)
+      else:
+        self._junit4_runner_supports_listing = (
+            self._junit4_runner_class in _TEST_LIST_JUNIT4_RUNNERS)
+
+    self._package_info = None
+    if self._apk_under_test:
+      package_under_test = self._apk_under_test.GetPackageName()
+      for package_info in constants.PACKAGE_INFO.itervalues():
+        if package_under_test == package_info.package:
+          self._package_info = package_info
+          break
+    if not self._package_info:
+      logging.warning(("Unable to find package info for %s. " +
+                       "(This may just mean that the test package is " +
+                       "currently being installed.)"),
+                       self._test_package)
+
+    for apk in args.additional_apks:
+      if not os.path.exists(apk):
+        error_func('Unable to find additional APK: %s' % apk)
+    self._additional_apks = (
+        [apk_helper.ToHelper(x) for x in args.additional_apks])
+
+  def _initializeDataDependencyAttributes(self, args, data_deps_delegate):
+    self._data_deps = []
+    self._data_deps_delegate = data_deps_delegate
+    self._runtime_deps_path = args.runtime_deps_path
+
+    if not self._runtime_deps_path:
+      logging.warning('No data dependencies will be pushed.')
+
+  def _initializeTestFilterAttributes(self, args):
+    self._test_filter = test_filter.InitializeFilterFromArgs(args)
+
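+    # Annotation filters arrive as 'Name' or 'Name=value' strings.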
+    def annotation_element(a):
+      a = a.split('=', 1)
+      return (a[0], a[1] if len(a) == 2 else None)
+
+    if args.annotation_str:
+      self._annotations = [
+          annotation_element(a) for a in args.annotation_str.split(',')]
+    elif not self._test_filter:
+      self._annotations = [
+          annotation_element(a) for a in _DEFAULT_ANNOTATIONS]
+    else:
+      self._annotations = []
+
+    if args.exclude_annotation_str:
+      self._excluded_annotations = [
+          annotation_element(a) for a in args.exclude_annotation_str.split(',')]
+    else:
+      self._excluded_annotations = []
+
+    requested_annotations = set(a[0] for a in self._annotations)
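+    # Unless --run-disabled is passed, exclude the annotations in
+    # _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS that were not explicitly
+    # requested above.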
+    if not args.run_disabled:
+      self._excluded_annotations.extend(
+          annotation_element(a) for a in _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS
+          if a not in requested_annotations)
+
+  def _initializeFlagAttributes(self, args):
+    self._use_apk_under_test_flags_file = args.use_apk_under_test_flags_file
+    self._flags = ['--enable-test-intents']
+    if args.command_line_flags:
+      self._flags.extend(args.command_line_flags)
+    if args.device_flags_file:
+      with open(args.device_flags_file) as device_flags_file:
+        stripped_lines = (l.strip() for l in device_flags_file)
+        self._flags.extend(flag for flag in stripped_lines if flag)
+    if args.strict_mode and args.strict_mode != 'off' and (
+        # TODO(yliuyliu): Turn on strict mode for coverage once
+        # crbug/1006397 is fixed.
+        not args.coverage_dir):
+      self._flags.append('--strict-mode=' + args.strict_mode)
+
+  def _initializeTestControlAttributes(self, args):
+    self._screenshot_dir = args.screenshot_dir
+    self._timeout_scale = args.timeout_scale or 1
+    self._wait_for_java_debugger = args.wait_for_java_debugger
+
+  def _initializeTestCoverageAttributes(self, args):
+    self._coverage_directory = args.coverage_dir
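+    # Derive the JaCoCo report type from the ("Batch", "UnitTests")
+    # annotation: requested and not excluded means only unit tests run;
+    # excluded and not requested means unit tests are filtered out.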
+    if ("Batch", "UnitTests") in self._annotations and (
+        "Batch", "UnitTests") not in self._excluded_annotations:
+      self._jacoco_coverage_type = "unit_tests_only"
+    elif ("Batch", "UnitTests") not in self._annotations and (
+        "Batch", "UnitTests") in self._excluded_annotations:
+      self._jacoco_coverage_type = "unit_tests_excluded"
+
+  def _initializeLogAttributes(self, args):
+    self._enable_java_deobfuscation = args.enable_java_deobfuscation
+    self._store_tombstones = args.store_tombstones
+    self._symbolizer = stack_symbolizer.Symbolizer(
+        self.apk_under_test.path if self.apk_under_test else None)
+
+  def _initializeEditPrefsAttributes(self, args):
+    if not hasattr(args, 'shared_prefs_file') or not args.shared_prefs_file:
+      return
+    if not isinstance(args.shared_prefs_file, str):
+      logging.warning("Given non-string for a filepath")
+      return
+    self._edit_shared_prefs = shared_preference_utils.ExtractSettingsFromJson(
+        args.shared_prefs_file)
+
+  def _initializeReplaceSystemPackageAttributes(self, args):
+    if (not hasattr(args, 'replace_system_package')
+        or not args.replace_system_package):
+      return
+    self._replace_system_package = args.replace_system_package
+
+  def _initializeSystemPackagesToRemoveAttributes(self, args):
+    if (not hasattr(args, 'system_packages_to_remove')
+        or not args.system_packages_to_remove):
+      return
+    self._system_packages_to_remove = args.system_packages_to_remove
+
+  def _initializeUseWebviewProviderAttributes(self, args):
+    if (not hasattr(args, 'use_webview_provider')
+        or not args.use_webview_provider):
+      return
+    self._use_webview_provider = args.use_webview_provider
+
+  def _initializeSkiaGoldAttributes(self, args):
+    self._skia_gold_properties = gold_utils.AndroidSkiaGoldProperties(args)
+
+  @property
+  def additional_apks(self):
+    return self._additional_apks
+
+  @property
+  def apk_under_test(self):
+    return self._apk_under_test
+
+  @property
+  def apk_under_test_incremental_install_json(self):
+    return self._apk_under_test_incremental_install_json
+
+  @property
+  def modules(self):
+    return self._modules
+
+  @property
+  def fake_modules(self):
+    return self._fake_modules
+
+  @property
+  def additional_locales(self):
+    return self._additional_locales
+
+  @property
+  def coverage_directory(self):
+    return self._coverage_directory
+
+  @property
+  def edit_shared_prefs(self):
+    return self._edit_shared_prefs
+
+  @property
+  def external_shard_index(self):
+    return self._external_shard_index
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def jacoco_coverage_type(self):
+    return self._jacoco_coverage_type
+
+  @property
+  def junit3_runner_class(self):
+    return self._junit3_runner_class
+
+  @property
+  def junit4_runner_class(self):
+    return self._junit4_runner_class
+
+  @property
+  def junit4_runner_supports_listing(self):
+    return self._junit4_runner_supports_listing
+
+  @property
+  def package_info(self):
+    return self._package_info
+
+  @property
+  def replace_system_package(self):
+    return self._replace_system_package
+
+  @property
+  def use_webview_provider(self):
+    return self._use_webview_provider
+
+  @property
+  def screenshot_dir(self):
+    return self._screenshot_dir
+
+  @property
+  def skia_gold_properties(self):
+    return self._skia_gold_properties
+
+  @property
+  def store_tombstones(self):
+    return self._store_tombstones
+
+  @property
+  def suite(self):
+    return self._suite
+
+  @property
+  def symbolizer(self):
+    return self._symbolizer
+
+  @property
+  def system_packages_to_remove(self):
+    return self._system_packages_to_remove
+
+  @property
+  def test_apk(self):
+    return self._test_apk
+
+  @property
+  def test_apk_incremental_install_json(self):
+    return self._test_apk_incremental_install_json
+
+  @property
+  def test_jar(self):
+    return self._test_jar
+
+  @property
+  def test_support_apk(self):
+    return self._test_support_apk
+
+  @property
+  def test_package(self):
+    return self._test_package
+
+  @property
+  def timeout_scale(self):
+    return self._timeout_scale
+
+  @property
+  def total_external_shards(self):
+    return self._total_external_shards
+
+  @property
+  def use_apk_under_test_flags_file(self):
+    return self._use_apk_under_test_flags_file
+
+  @property
+  def wait_for_java_debugger(self):
+    return self._wait_for_java_debugger
+
+  @property
+  def wpr_record_mode(self):
+    return self._wpr_enable_record
+
+  @property
+  def wpr_replay_mode(self):
+    return not self._wpr_enable_record
+
+  #override
+  def TestType(self):
+    return 'instrumentation'
+
+  #override
+  def GetPreferredAbis(self):
+    # We could alternatively take the intersection of what they all support,
+    # but it should never be the case that they support different things.
+    apks = [self._test_apk, self._apk_under_test] + self._additional_apks
+    for apk in apks:
+      if apk:
+        ret = apk.GetAbis()
+        if ret:
+          return ret
+    return []
+
+  #override
+  def SetUp(self):
+    self._data_deps.extend(
+        self._data_deps_delegate(self._runtime_deps_path))
+    if self._enable_java_deobfuscation:
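+      # A deobfuscation mapping file is expected next to the test APK at
+      # '<apk path>.mapping'.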
+      self._deobfuscator = deobfuscator.DeobfuscatorPool(
+          self.test_apk.path + '.mapping')
+
+  def GetDataDependencies(self):
+    return self._data_deps
+
+  def GetTests(self):
+    if self.test_jar:
+      raw_tests = GetAllTestsFromJar(self.test_jar)
+    else:
+      raw_tests = GetAllTestsFromApk(self.test_apk.path)
+    return self.ProcessRawTests(raw_tests)
+
+  def MaybeDeobfuscateLines(self, lines):
+    if not self._deobfuscator:
+      return lines
+    return self._deobfuscator.TransformLines(lines)
+
+  def ProcessRawTests(self, raw_tests):
+    inflated_tests = self._ParameterizeTestsWithFlags(
+        self._InflateTests(raw_tests))
+    if self._junit4_runner_class is None and any(
+        t['is_junit4'] for t in inflated_tests):
+      raise MissingJUnit4RunnerException()
+    filtered_tests = FilterTests(
+        inflated_tests, self._test_filter, self._annotations,
+        self._excluded_annotations)
+    if self._test_filter and not filtered_tests:
+      for t in inflated_tests:
+        logging.debug('  %s', GetUniqueTestName(t))
+      logging.warning('Unmatched Filter: %s', self._test_filter)
+    return filtered_tests
+
+  # pylint: disable=no-self-use
+  def _InflateTests(self, tests):
+    inflated_tests = []
+    for c in tests:
+      for m in c['methods']:
+        a = dict(c['annotations'])
+        a.update(m['annotations'])
+        inflated_tests.append({
+            'class': c['class'],
+            'method': m['method'],
+            'annotations': a,
+            # TODO(https://crbug.com/1084729): Remove is_junit4.
+            'is_junit4': True
+        })
+    return inflated_tests
+
+  def _ParameterizeTestsWithFlags(self, tests):
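+    # Expand command-line-flag parameterizations: the first switch set is
+    # applied to the original test entry in place, and each additional set
+    # yields a shallow copy appended after all original tests.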
+
+    def _checkParameterization(annotations):
+      types = [
+          _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES,
+          _PARAMETERIZED_COMMAND_LINE_FLAGS,
+      ]
+      if types[0] in annotations and types[1] in annotations:
+        raise CommandLineParameterizationException(
+            'Multiple command-line parameterization types: {}.'.format(
+                ', '.join(types)))
+
+    def _switchesToFlags(switches):
+      return ['--{}'.format(s) for s in switches if s]
+
+    def _annotationToSwitches(clazz, methods):
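+      # A ParameterizedCommandLineFlags annotation nests one or more
+      # $Switches annotations; recurse to collect every switch list.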
+      if clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES:
+        return [methods['value']]
+      elif clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS:
+        list_of_switches = []
+        for annotation in methods['value']:
+          for clazz, methods in annotation.iteritems():
+            list_of_switches += _annotationToSwitches(clazz, methods)
+        return list_of_switches
+      else:
+        return []
+
+    def _setTestFlags(test, flags):
+      if flags:
+        test['flags'] = flags
+      elif 'flags' in test:
+        del test['flags']
+
+    new_tests = []
+    for t in tests:
+      annotations = t['annotations']
+      list_of_switches = []
+      _checkParameterization(annotations)
+      if _SKIP_PARAMETERIZATION not in annotations:
+        for clazz, methods in annotations.iteritems():
+          list_of_switches += _annotationToSwitches(clazz, methods)
+      if list_of_switches:
+        _setTestFlags(t, _switchesToFlags(list_of_switches[0]))
+        for p in list_of_switches[1:]:
+          parameterized_t = copy.copy(t)
+          _setTestFlags(parameterized_t, _switchesToFlags(p))
+          new_tests.append(parameterized_t)
+    return tests + new_tests
+
+  @staticmethod
+  def ParseAmInstrumentRawOutput(raw_output):
+    return ParseAmInstrumentRawOutput(raw_output)
+
+  @staticmethod
+  def GenerateTestResults(result_code, result_bundle, statuses, duration_ms,
+                          device_abi, symbolizer):
+    return GenerateTestResults(result_code, result_bundle, statuses,
+                               duration_ms, device_abi, symbolizer)
+
+  #override
+  def TearDown(self):
+    self.symbolizer.CleanUp()
+    if self._deobfuscator:
+      self._deobfuscator.Close()
+      self._deobfuscator = None
diff --git a/src/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/src/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
new file mode 100755
index 0000000..77918bb
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -0,0 +1,1187 @@
+#!/usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for instrumentation_test_instance."""
+
+# pylint: disable=protected-access
+
+import collections
+import tempfile
+import unittest
+
+from pylib.base import base_test_result
+from pylib.instrumentation import instrumentation_test_instance
+
+import mock  # pylint: disable=import-error
+
+_INSTRUMENTATION_TEST_INSTANCE_PATH = (
+    'pylib.instrumentation.instrumentation_test_instance.%s')
+
+class InstrumentationTestInstanceTest(unittest.TestCase):
+
+  def setUp(self):
+    options = mock.Mock()
+    options.tool = ''
+
+  @staticmethod
+  def createTestInstance():
+    c = _INSTRUMENTATION_TEST_INSTANCE_PATH % 'InstrumentationTestInstance'
+    # yapf: disable
+    with mock.patch('%s._initializeApkAttributes' % c), (
+         mock.patch('%s._initializeDataDependencyAttributes' % c)), (
+         mock.patch('%s._initializeTestFilterAttributes' % c)), (
+         mock.patch('%s._initializeFlagAttributes' % c)), (
+         mock.patch('%s._initializeTestControlAttributes' % c)), (
+         mock.patch('%s._initializeTestCoverageAttributes' % c)), (
+         mock.patch('%s._initializeSkiaGoldAttributes' % c)):
+      # yapf: enable
+      return instrumentation_test_instance.InstrumentationTestInstance(
+          mock.MagicMock(), mock.MagicMock(), lambda s: None)
+
+  _FlagAttributesArgs = collections.namedtuple('_FlagAttributesArgs', [
+      'command_line_flags', 'device_flags_file', 'strict_mode',
+      'use_apk_under_test_flags_file', 'coverage_dir'
+  ])
+
+  def createFlagAttributesArgs(self,
+                               command_line_flags=None,
+                               device_flags_file=None,
+                               strict_mode=None,
+                               use_apk_under_test_flags_file=False,
+                               coverage_dir=None):
+    return self._FlagAttributesArgs(command_line_flags, device_flags_file,
+                                    strict_mode, use_apk_under_test_flags_file,
+                                    coverage_dir)
+
+  def test_initializeFlagAttributes_commandLineFlags(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar'])
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+  def test_initializeFlagAttributes_deviceFlagsFile(self):
+    o = self.createTestInstance()
+    with tempfile.NamedTemporaryFile() as flags_file:
+      flags_file.write('\n'.join(['--foo', '--bar']))
+      flags_file.flush()
+
+      args = self.createFlagAttributesArgs(device_flags_file=flags_file.name)
+      o._initializeFlagAttributes(args)
+      self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+  def test_initializeFlagAttributes_strictModeOn(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(strict_mode='on')
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on'])
+
+  def test_initializeFlagAttributes_strictModeOn_coverageOn(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(
+        strict_mode='on', coverage_dir='/coverage/dir')
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents'])
+
+  def test_initializeFlagAttributes_strictModeOff(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(strict_mode='off')
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents'])
+
+  def testGetTests_noFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod1',
+        'is_junit4': True,
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'MediumTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod2',
+        'is_junit4': True,
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'method': 'testMethod1',
+        'is_junit4': True,
+      },
+    ]
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_simpleGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_simpleGtestUnqualifiedNameFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = 'SampleTest.testMethod1'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_parameterizedTestGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1__sandboxed_mode',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod1',
+        'is_junit4': True,
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod1__sandboxed_mode',
+        'is_junit4': True,
+      },
+    ]
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_wildcardGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = 'org.chromium.test.SampleTest2.*'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_negativeGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'MediumTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod2',
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = '*-org.chromium.test.SampleTest.testMethod1'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_annotationFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._annotations = [('SmallTest', None)]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_excludedAnnotationFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Foo']
+                },
+                'MediumTest': None,
+            },
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod2',
+        },
+    ]
+
+    o._excluded_annotations = [('SmallTest', None)]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_annotationSimpleValueFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {
+              'SmallTest': None,
+              'TestValue': '1',
+            },
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {
+              'MediumTest': None,
+              'TestValue': '2',
+            },
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {
+              'SmallTest': None,
+              'TestValue': '3',
+            },
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Foo']
+                },
+                'SmallTest': None,
+                'TestValue': '1',
+            },
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod1',
+        },
+    ]
+
+    o._annotations = [('TestValue', '1')]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_annotationDictValueFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._annotations = [('Feature', 'Bar')]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTestName(self):
+    test = {
+      'annotations': {
+        'RunWith': {'value': 'class J4Runner'},
+        'SmallTest': {},
+        'Test': {'expected': 'class org.junit.Test$None',
+                 'timeout': '0'},
+        'UiThreadTest': {}},
+      'class': 'org.chromium.TestA',
+      'is_junit4': True,
+      'method': 'testSimple'}
+    unqualified_class_test = {
+      'class': test['class'].split('.')[-1],
+      'method': test['method']
+    }
+
+    self.assertEquals(
+        instrumentation_test_instance.GetTestName(test, sep='.'),
+        'org.chromium.TestA.testSimple')
+    self.assertEquals(
+        instrumentation_test_instance.GetTestName(
+            unqualified_class_test, sep='.'),
+        'TestA.testSimple')
+
+  def testGetUniqueTestName(self):
+    test = {
+      'annotations': {
+        'RunWith': {'value': 'class J4Runner'},
+        'SmallTest': {},
+        'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+        'UiThreadTest': {}},
+      'class': 'org.chromium.TestA',
+      'flags': ['enable_features=abc'],
+      'is_junit4': True,
+      'method': 'testSimple'}
+    self.assertEquals(
+        instrumentation_test_instance.GetUniqueTestName(
+            test, sep='.'),
+        'org.chromium.TestA.testSimple_with_enable_features=abc')
+
+  def testGetTestNameWithoutParameterPostfix(self):
+    test = {
+      'annotations': {
+        'RunWith': {'value': 'class J4Runner'},
+        'SmallTest': {},
+        'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+        'UiThreadTest': {}},
+      'class': 'org.chromium.TestA__sandbox_mode',
+      'flags': 'enable_features=abc',
+      'is_junit4': True,
+      'method': 'testSimple'}
+    unqualified_class_test = {
+      'class': test['class'].split('.')[-1],
+      'method': test['method']
+    }
+    self.assertEquals(
+        instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+            test, sep='.'),
+        'org.chromium.TestA')
+    self.assertEquals(
+        instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+            unqualified_class_test, sep='.'),
+        'TestA')
+
+  def testGetTests_multipleAnnotationValuesRequested(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {
+              'Feature': {'value': ['Baz']},
+              'MediumTest': None,
+            },
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Baz']
+                },
+                'MediumTest': None,
+            },
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod2',
+        },
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Bar']
+                },
+                'SmallTest': None,
+            },
+            'class': 'org.chromium.test.SampleTest2',
+            'is_junit4': True,
+            'method': 'testMethod1',
+        },
+    ]
+
+    o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGenerateTestResults_noStatus(self):
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, [], 1000, None, None)
+    self.assertEqual([], results)
+
+  def testGenerateTestResults_testPassed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_true(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'true',
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_false(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'false',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testFailed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-2, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+
+  def testGenerateTestResults_testUnknownException(self):
+    stacktrace = 'long\nstacktrace'
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+        'stack': stacktrace,
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+    self.assertEqual(stacktrace, results[0].GetLog())
+
+  def testGenerateJUnitTestResults_testSkipped_true(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-3, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+  def testParameterizedCommandLineFlagsSwitches(self):
+    o = self.createTestInstance()
+    raw_tests = [{
+        'annotations': {
+            'ParameterizedCommandLineFlags$Switches': {
+                'value': ['enable-features=abc', 'enable-features=def']
+            }
+        },
+        'class':
+        'org.chromium.test.SampleTest',
+        'superclass':
+        'java.lang.Object',
+        'methods': [
+            {
+                'annotations': {
+                    'SmallTest': None
+                },
+                'method': 'testMethod1',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags$Switches': {
+                        'value': ['enable-features=ghi', 'enable-features=jkl']
+                    },
+                },
+                'method': 'testMethod2',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags$Switches': {
+                        'value': []
+                    },
+                },
+                'method': 'testMethod3',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'SkipCommandLineParameterization': None,
+                },
+                'method': 'testMethod4',
+            },
+        ],
+    }]
+
+    expected_tests = [
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--enable-features=abc', '--enable-features=def'],
+            'is_junit4': True,
+            'method': 'testMethod1'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--enable-features=ghi', '--enable-features=jkl'],
+            'is_junit4': True,
+            'method': 'testMethod2'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod3'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod4'
+        },
+    ]
+    for i in range(4):
+      expected_tests[i]['annotations'].update(raw_tests[0]['annotations'])
+      expected_tests[i]['annotations'].update(
+          raw_tests[0]['methods'][i]['annotations'])
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testParameterizedCommandLineFlags(self):
+    o = self.createTestInstance()
+    raw_tests = [{
+        'annotations': {
+            'ParameterizedCommandLineFlags': {
+                'value': [
+                    {
+                        'ParameterizedCommandLineFlags$Switches': {
+                            'value': [
+                                'enable-features=abc',
+                                'force-fieldtrials=trial/group'
+                            ],
+                        }
+                    },
+                    {
+                        'ParameterizedCommandLineFlags$Switches': {
+                            'value': [
+                                'enable-features=abc2',
+                                'force-fieldtrials=trial/group2'
+                            ],
+                        }
+                    },
+                ],
+            },
+        },
+        'class':
+        'org.chromium.test.SampleTest',
+        'superclass':
+        'java.lang.Object',
+        'methods': [
+            {
+                'annotations': {
+                    'SmallTest': None
+                },
+                'method': 'testMethod1',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags': {
+                        'value': [{
+                            'ParameterizedCommandLineFlags$Switches': {
+                                'value': ['enable-features=def']
+                            }
+                        }],
+                    },
+                },
+                'method': 'testMethod2',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags': {
+                        'value': [],
+                    },
+                },
+                'method': 'testMethod3',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'SkipCommandLineParameterization': None,
+                },
+                'method': 'testMethod4',
+            },
+        ],
+    }]
+
+    expected_tests = [
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags':
+            ['--enable-features=abc', '--force-fieldtrials=trial/group'],
+            'is_junit4': True,
+            'method': 'testMethod1'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--enable-features=def'],
+            'is_junit4': True,
+            'method': 'testMethod2'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod3'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod4'
+        },
+        {
+            'annotations': {},
+            'class':
+            'org.chromium.test.SampleTest',
+            'flags': [
+                '--enable-features=abc2',
+                '--force-fieldtrials=trial/group2',
+            ],
+            'is_junit4':
+            True,
+            'method':
+            'testMethod1'
+        },
+    ]
+    for i in range(4):
+      expected_tests[i]['annotations'].update(raw_tests[0]['annotations'])
+      expected_tests[i]['annotations'].update(
+          raw_tests[0]['methods'][i]['annotations'])
+    expected_tests[4]['annotations'].update(raw_tests[0]['annotations'])
+    expected_tests[4]['annotations'].update(
+        raw_tests[0]['methods'][0]['annotations'])
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testDifferentCommandLineParameterizations(self):
+    o = self.createTestInstance()
+    raw_tests = [{
+        'annotations': {},
+        'class':
+        'org.chromium.test.SampleTest',
+        'superclass':
+        'java.lang.Object',
+        'methods': [
+            {
+                'annotations': {
+                    'SmallTest': None,
+                    'ParameterizedCommandLineFlags': {
+                        'value': [
+                            {
+                                'ParameterizedCommandLineFlags$Switches': {
+                                    'value': ['a1', 'a2'],
+                                }
+                            },
+                        ],
+                    },
+                },
+                'method': 'testMethod2',
+            },
+            {
+                'annotations': {
+                    'SmallTest': None,
+                    'ParameterizedCommandLineFlags$Switches': {
+                        'value': ['b1', 'b2'],
+                    },
+                },
+                'method': 'testMethod3',
+            },
+        ],
+    }]
+
+    expected_tests = [
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--a1', '--a2'],
+            'is_junit4': True,
+            'method': 'testMethod2'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--b1', '--b2'],
+            'is_junit4': True,
+            'method': 'testMethod3'
+        },
+    ]
+    for i in range(2):
+      expected_tests[i]['annotations'].update(
+          raw_tests[0]['methods'][i]['annotations'])
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testMultipleCommandLineParameterizations_raises(self):
+    o = self.createTestInstance()
+    raw_tests = [
+        {
+            'annotations': {
+                'ParameterizedCommandLineFlags': {
+                    'value': [
+                        {
+                            'ParameterizedCommandLineFlags$Switches': {
+                                'value': [
+                                    'enable-features=abc',
+                                    'force-fieldtrials=trial/group',
+                                ],
+                            }
+                        },
+                    ],
+                },
+            },
+            'class':
+            'org.chromium.test.SampleTest',
+            'superclass':
+            'java.lang.Object',
+            'methods': [
+                {
+                    'annotations': {
+                        'SmallTest': None,
+                        'ParameterizedCommandLineFlags$Switches': {
+                            'value': [
+                                'enable-features=abc',
+                                'force-fieldtrials=trial/group',
+                            ],
+                        },
+                    },
+                    'method': 'testMethod1',
+                },
+            ],
+        },
+    ]
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    self.assertRaises(
+        instrumentation_test_instance.CommandLineParameterizationException,
+        o.ProcessRawTests, [raw_tests[0]])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/instrumentation/json_perf_parser.py b/src/build/android/pylib/instrumentation/json_perf_parser.py
new file mode 100644
index 0000000..c647890
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/json_perf_parser.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+  """Summarizes TraceEvent JSON data for performance metrics.
+
+  Example JSON Inputs (More tags can be added but these are required):
+  Measuring Duration:
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "S",
+      "name": "TestTrace"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "F",
+      "name": "TestTrace"
+    },
+    ...
+  ]
+
+  Measuring Call Frequency (FPS):
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    ...
+  ]
+
+  Args:
+    json_data: A list of dictionaries, each representing a JSON object.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    A dictionary of result data with the following tags:
+      min: The minimum value tracked.
+      max: The maximum value tracked.
+      average: The average of all the values tracked.
+      count: The number of times the category/name pair was tracked.
+      type: The type of tracking ('Instant' for instant tags and 'Span' for
+            begin/end tags).
+      category: The passed in category filter.
+      name: The passed in name filter.
+      data_points: A list of all of the times used to generate this data.
+      units: The units for the values being reported.
+
+  Raises:
+    Exception: if entry contains invalid data.
+  """
+
+  def EntryFilter(entry):
+    return entry['cat'] == 'Java' and entry['name'] == name
+  filtered_entries = [j for j in json_data if EntryFilter(j)]
+
+  result = {}
+
+  result['min'] = -1
+  result['max'] = -1
+  result['average'] = 0
+  result['count'] = 0
+  result['type'] = 'Unknown'
+  result['category'] = 'Java'
+  result['name'] = name
+  result['data_points'] = []
+  result['units'] = ''
+
+  total_sum = 0
+
+  last_val = 0
+  val_type = None
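+  # Pair 'S' (start) and 'F' (finish) events into span durations, or use the
+  # gaps between consecutive 'I' (instant) events; the first entry decides
+  # whether values come from the 'mem' or the 'ts' field.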
+  for entry in filtered_entries:
+    if not val_type:
+      if 'mem' in entry:
+        val_type = 'mem'
+
+        def GetVal(entry):
+          return entry['mem']
+
+        result['units'] = 'kb'
+      elif 'ts' in entry:
+        val_type = 'ts'
+
+        def GetVal(entry):
+          return float(entry['ts']) / 1000.0
+
+        result['units'] = 'ms'
+      else:
+        raise Exception('Entry did not contain valid value info: %s' % entry)
+
+    if val_type not in entry:
+      raise Exception('Entry did not contain expected value type "%s" '
+                      'information: %s' % (val_type, entry))
+    val = GetVal(entry)
+    if (entry['ph'] == 'S' and
+        (result['type'] == 'Unknown' or result['type'] == 'Span')):
+      result['type'] = 'Span'
+      last_val = val
+    elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+          (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+                                   result['type'] == 'Instant'))):
+      if last_val > 0:
+        delta = val - last_val
+        if result['min'] == -1 or result['min'] > delta:
+          result['min'] = delta
+        if result['max'] == -1 or result['max'] < delta:
+          result['max'] = delta
+        total_sum += delta
+        result['count'] += 1
+        result['data_points'].append(delta)
+      if entry['ph'] == 'I':
+        result['type'] = 'Instant'
+        last_val = val
+  if result['count'] > 0:
+    result['average'] = total_sum / result['count']
+
+  return result
+
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+  """Returns the results from GetAverageRunInfo using a JSON string.
+
+  Args:
+    json_string: The string containing JSON.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+  """Returns the results from GetAverageRunInfo using a JSON file.
+
+  Args:
+    json_file: The path to a JSON file.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  with open(json_file, 'r') as f:
+    data = f.read()
+    perf = json.loads(data)
+
+  return GetAverageRunInfo(perf, name)
diff --git a/src/build/android/pylib/instrumentation/render_test.html.jinja b/src/build/android/pylib/instrumentation/render_test.html.jinja
new file mode 100644
index 0000000..81b85b7
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/render_test.html.jinja
@@ -0,0 +1,40 @@
+<html>
+<head>
+  <title>{{ test_name }}</title>
+  <script>
+  function toggleZoom() {
+    for (const img of document.getElementsByTagName("img")) {
+      if (img.hasAttribute('style')) {
+        img.removeAttribute('style');
+      } else {
+        img.style.width = '100%';
+      }
+    }
+  }
+  </script>
+</head>
+<body>
+  <a href="https://cs.chromium.org/search/?q={{ test_name }}&m=100&type=cs">Link to Golden (in repo)</a><br />
+  <a download="{{ test_name }}" href="{{ failure_link }}">Download Failure Image (right click and 'Save link as')</a>
+  <table>
+    <thead>
+      <tr>
+        <th>Failure</th>
+        <th>Golden</th>
+        <th>Diff</th>
+      </tr>
+    </thead>
+    <tbody style="vertical-align: top">
+      <tr onclick="toggleZoom()">
+        <td><img src="{{ failure_link }}" style="width: 100%" /></td>
+        {% if golden_link %}
+        <td><img src="{{ golden_link }}" style="width: 100%" /></td>
+        <td><img src="{{ diff_link }}" style="width: 100%" /></td>
+        {% else %}
+        <td>No Golden Image.</td>
+        {% endif %}
+      </tr>
+    </tbody>
+  </table>
+</body>
+</html>
diff --git a/src/build/android/pylib/instrumentation/test_result.py b/src/build/android/pylib/instrumentation/test_result.py
new file mode 100644
index 0000000..a1c7307
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/test_result.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import base_test_result
+
+
+class InstrumentationTestResult(base_test_result.BaseTestResult):
+  """Result information for a single instrumentation test."""
+
+  def __init__(self, full_name, test_type, dur, log=''):
+    """Construct an InstrumentationTestResult object.
+
+    Args:
+      full_name: Full name of the test.
+      test_type: Type of the test result as defined in ResultType.
+      dur: Duration of the test run in milliseconds.
+      log: A string listing any errors.
+    """
+    super(InstrumentationTestResult, self).__init__(
+        full_name, test_type, dur, log)
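+    # e.g. 'org.chromium.FooTest#testBar' yields class name
+    # 'org.chromium.FooTest' and test name 'testBar'.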
+    name_pieces = full_name.rsplit('#')
+    if len(name_pieces) > 1:
+      self._test_name = name_pieces[1]
+      self._class_name = name_pieces[0]
+    else:
+      self._class_name = full_name
+      self._test_name = full_name
+
+  def SetDuration(self, duration):
+    """Set the test duration."""
+    self._duration = duration
diff --git a/src/build/android/pylib/junit/__init__.py b/src/build/android/pylib/junit/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/junit/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/junit/junit_test_instance.py b/src/build/android/pylib/junit/junit_test_instance.py
new file mode 100644
index 0000000..a3d18e0
--- /dev/null
+++ b/src/build/android/pylib/junit/junit_test_instance.py
@@ -0,0 +1,75 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.utils import test_filter
+
+
+class JunitTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, _):
+    super(JunitTestInstance, self).__init__()
+
+    self._coverage_dir = args.coverage_dir
+    self._debug_socket = args.debug_socket
+    self._coverage_on_the_fly = args.coverage_on_the_fly
+    self._package_filter = args.package_filter
+    self._resource_apk = args.resource_apk
+    self._robolectric_runtime_deps_dir = args.robolectric_runtime_deps_dir
+    self._runner_filter = args.runner_filter
+    self._shards = args.shards
+    self._test_filter = test_filter.InitializeFilterFromArgs(args)
+    self._test_suite = args.test_suite
+
+  #override
+  def TestType(self):
+    return 'junit'
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def TearDown(self):
+    pass
+
+  @property
+  def coverage_dir(self):
+    return self._coverage_dir
+
+  @property
+  def coverage_on_the_fly(self):
+    return self._coverage_on_the_fly
+
+  @property
+  def debug_socket(self):
+    return self._debug_socket
+
+  @property
+  def package_filter(self):
+    return self._package_filter
+
+  @property
+  def resource_apk(self):
+    return self._resource_apk
+
+  @property
+  def robolectric_runtime_deps_dir(self):
+    return self._robolectric_runtime_deps_dir
+
+  @property
+  def runner_filter(self):
+    return self._runner_filter
+
+  @property
+  def test_filter(self):
+    return self._test_filter
+
+  @property
+  def shards(self):
+    return self._shards
+
+  @property
+  def suite(self):
+    return self._test_suite
diff --git a/src/build/android/pylib/local/__init__.py b/src/build/android/pylib/local/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/local/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/device/__init__.py b/src/build/android/pylib/local/device/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/local/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/device/local_device_environment.py b/src/build/android/pylib/local/device/local_device_environment.py
new file mode 100644
index 0000000..d2a9077a
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_environment.py
@@ -0,0 +1,328 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import datetime
+import functools
+import logging
+import os
+import shutil
+import tempfile
+import threading
+
+import devil_chromium
+from devil import base_error
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import logcat_monitor
+from devil.android.sdk import adb_wrapper
+from devil.utils import file_utils
+from devil.utils import parallelizer
+from pylib import constants
+from pylib.constants import host_paths
+from pylib.base import environment
+from pylib.utils import instrumentation_tracing
+from py_trace_event import trace_event
+
+
+LOGCAT_FILTERS = [
+  'chromium:v',
+  'cr_*:v',
+  'DEBUG:I',
+  'StrictMode:D',
+]
+
+
+def _DeviceCachePath(device):
+  file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+  return os.path.join(constants.GetOutDirectory(), file_name)
+
+
+def handle_shard_failures(f):
+  """A decorator that handles device failures for per-device functions.
+
+  Args:
+    f: the function being decorated. The function must take at least one
+      argument, and that argument must be the device.
+  """
+  return handle_shard_failures_with(None)(f)
+
+
+# TODO(jbudorick): Refactor this to work as a decorator or context manager.
+def handle_shard_failures_with(on_failure):
+  """A decorator that handles device failures for per-device functions.
+
+  This calls on_failure in the event of a failure.
+
+  Args:
+    f: the function being decorated. The function must take at least one
+      argument, and that argument must be the device.
+    on_failure: A binary function to call on failure.
+  """
+  def decorator(f):
+    @functools.wraps(f)
+    def wrapper(dev, *args, **kwargs):
+      try:
+        return f(dev, *args, **kwargs)
+      except device_errors.CommandTimeoutError:
+        logging.exception('Shard timed out: %s(%s)', f.__name__, str(dev))
+      except device_errors.DeviceUnreachableError:
+        logging.exception('Shard died: %s(%s)', f.__name__, str(dev))
+      except base_error.BaseError:
+        logging.exception('Shard failed: %s(%s)', f.__name__, str(dev))
+      except SystemExit:
+        logging.exception('Shard killed: %s(%s)', f.__name__, str(dev))
+        raise
+      if on_failure:
+        on_failure(dev, f.__name__)
+      return None
+
+    return wrapper
+
+  return decorator
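+
+
+# Illustrative usage sketch (names are hypothetical): decorate a per-device
+# step so that one bad device is logged and skipped instead of aborting the
+# whole run. On failure the wrapper logs the exception, calls
+# on_failure(dev, name) if provided, and returns None:
+#
+#   @handle_shard_failures_with(on_failure=env.DenylistDevice)
+#   def flash_device(device):
+#     device.EnableRoot()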
+
+
+def place_nomedia_on_device(dev, device_root):
+  """Places .nomedia file in test data root.
+
+  This helps to prevent system from scanning media files inside test data.
+
+  Args:
+    dev: Device to place .nomedia file.
+    device_root: Base path on device to place .nomedia file.
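+
+  Example (device path is illustrative):
+    place_nomedia_on_device(dev, '/sdcard/chromium_tests_root')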
+  """
+
+  dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+  dev.WriteFile('%s/.nomedia' % device_root, 'https://crbug.com/796640')
+
+
+class LocalDeviceEnvironment(environment.Environment):
+
+  def __init__(self, args, output_manager, _error_func):
+    super(LocalDeviceEnvironment, self).__init__(output_manager)
+    self._current_try = 0
+    self._denylist = (device_denylist.Denylist(args.denylist_file)
+                      if args.denylist_file else None)
+    self._device_serials = args.test_devices
+    self._devices_lock = threading.Lock()
+    self._devices = None
+    self._concurrent_adb = args.enable_concurrent_adb
+    self._enable_device_cache = args.enable_device_cache
+    self._logcat_monitors = []
+    self._logcat_output_dir = args.logcat_output_dir
+    self._logcat_output_file = args.logcat_output_file
+    self._max_tries = 1 + args.num_retries
+    self._preferred_abis = None
+    self._recover_devices = args.recover_devices
+    self._skip_clear_data = args.skip_clear_data
+    self._tool_name = args.tool
+    self._trace_output = None
+    if hasattr(args, 'trace_output'):
+      self._trace_output = args.trace_output
+    self._trace_all = None
+    if hasattr(args, 'trace_all'):
+      self._trace_all = args.trace_all
+
+    devil_chromium.Initialize(
+        output_directory=constants.GetOutDirectory(),
+        adb_path=args.adb_path)
+
+    # Some things such as Forwarder require ADB to be in the environment path,
+    # while others like Devil's bundletool.py require Java on the path.
+    adb_dir = os.path.dirname(adb_wrapper.AdbWrapper.GetAdbPath())
+    if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+      os.environ['PATH'] = os.pathsep.join(
+          [adb_dir, host_paths.JAVA_PATH, os.environ['PATH']])
+
+  #override
+  def SetUp(self):
+    if self.trace_output and self._trace_all:
+      to_include = [r"pylib\..*", r"devil\..*", "__main__"]
+      to_exclude = ["logging"]
+      instrumentation_tracing.start_instrumenting(self.trace_output, to_include,
+                                                  to_exclude)
+    elif self.trace_output:
+      self.EnableTracing()
+
+  # Must be called before accessing |devices|.
+  def SetPreferredAbis(self, abis):
+    assert self._devices is None
+    self._preferred_abis = abis
+
+  def _InitDevices(self):
+    device_arg = []
+    if self._device_serials:
+      device_arg = self._device_serials
+
+    self._devices = device_utils.DeviceUtils.HealthyDevices(
+        self._denylist,
+        retries=5,
+        enable_usb_resets=True,
+        enable_device_files_cache=self._enable_device_cache,
+        default_retries=self._max_tries - 1,
+        device_arg=device_arg,
+        abis=self._preferred_abis)
+
+    if self._logcat_output_file:
+      self._logcat_output_dir = tempfile.mkdtemp()
+
+    @handle_shard_failures_with(on_failure=self.DenylistDevice)
+    def prepare_device(d):
+      d.WaitUntilFullyBooted()
+
+      if self._enable_device_cache:
+        cache_path = _DeviceCachePath(d)
+        if os.path.exists(cache_path):
+          logging.info('Using device cache: %s', cache_path)
+          with open(cache_path) as f:
+            d.LoadCacheData(f.read())
+          # Delete cached file so that any exceptions cause it to be cleared.
+          os.unlink(cache_path)
+
+      if self._logcat_output_dir:
+        logcat_file = os.path.join(
+            self._logcat_output_dir,
+            '%s_%s' % (d.adb.GetDeviceSerial(),
+                       datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S')))
+        monitor = logcat_monitor.LogcatMonitor(
+            d.adb, clear=True, output_file=logcat_file)
+        self._logcat_monitors.append(monitor)
+        monitor.Start()
+
+    self.parallel_devices.pMap(prepare_device)
+
+  @property
+  def current_try(self):
+    return self._current_try
+
+  def IncrementCurrentTry(self):
+    self._current_try += 1
+
+  def ResetCurrentTry(self):
+    self._current_try = 0
+
+  @property
+  def denylist(self):
+    return self._denylist
+
+  @property
+  def concurrent_adb(self):
+    return self._concurrent_adb
+
+  @property
+  def devices(self):
+    # Initialize lazily so that host-only tests do not fail when no devices are
+    # attached.
+    if self._devices is None:
+      self._InitDevices()
+    return self._devices
+
+  @property
+  def max_tries(self):
+    return self._max_tries
+
+  @property
+  def parallel_devices(self):
+    return parallelizer.SyncParallelizer(self.devices)
+
+  @property
+  def recover_devices(self):
+    return self._recover_devices
+
+  @property
+  def skip_clear_data(self):
+    return self._skip_clear_data
+
+  @property
+  def tool(self):
+    return self._tool_name
+
+  @property
+  def trace_output(self):
+    return self._trace_output
+
+  #override
+  def TearDown(self):
+    if self.trace_output and self._trace_all:
+      instrumentation_tracing.stop_instrumenting()
+    elif self.trace_output:
+      self.DisableTracing()
+
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    if not self._devices:
+      return
+
+    @handle_shard_failures_with(on_failure=self.DenylistDevice)
+    def tear_down_device(d):
+      # Write the cache even when not using it so that it will be ready the
+      # first time that it is enabled. Writing it every time is also necessary
+      # so that an invalid cache can be flushed just by disabling it for one
+      # run.
+      cache_path = _DeviceCachePath(d)
+      if os.path.exists(os.path.dirname(cache_path)):
+        with open(cache_path, 'w') as f:
+          f.write(d.DumpCacheData())
+          logging.info('Wrote device cache: %s', cache_path)
+      else:
+        logging.warning(
+            'Unable to write device cache as %s directory does not exist',
+            os.path.dirname(cache_path))
+
+    self.parallel_devices.pMap(tear_down_device)
+
+    for m in self._logcat_monitors:
+      try:
+        m.Stop()
+        m.Close()
+        _, temp_path = tempfile.mkstemp()
+        with open(m.output_file, 'r') as infile:
+          with open(temp_path, 'w') as outfile:
+            for line in infile:
+              outfile.write('Device(%s) %s' % (m.adb.GetDeviceSerial(), line))
+        shutil.move(temp_path, m.output_file)
+      except base_error.BaseError:
+        logging.exception('Failed to stop logcat monitor for %s',
+                          m.adb.GetDeviceSerial())
+      except IOError:
+        logging.exception('Failed to locate logcat for device %s',
+                          m.adb.GetDeviceSerial())
+
+    if self._logcat_output_file:
+      file_utils.MergeFiles(
+          self._logcat_output_file,
+          [m.output_file for m in self._logcat_monitors
+           if os.path.exists(m.output_file)])
+      shutil.rmtree(self._logcat_output_dir)
+
+  def DenylistDevice(self, device, reason='local_device_failure'):
+    device_serial = device.adb.GetDeviceSerial()
+    if self._denylist:
+      self._denylist.Extend([device_serial], reason=reason)
+    with self._devices_lock:
+      self._devices = [d for d in self._devices if str(d) != device_serial]
+    logging.error('Device %s denylisted: %s', device_serial, reason)
+    if not self._devices:
+      raise device_errors.NoDevicesError(
+          'All devices were denylisted due to errors')
+
+  @staticmethod
+  def DisableTracing():
+    if not trace_event.trace_is_enabled():
+      logging.warning('Tracing is not running.')
+    else:
+      trace_event.trace_disable()
+
+  def EnableTracing(self):
+    if trace_event.trace_is_enabled():
+      logging.warning('Tracing is already running.')
+    else:
+      trace_event.trace_enable(self._trace_output)
diff --git a/src/build/android/pylib/local/device/local_device_gtest_run.py b/src/build/android/pylib/local/device/local_device_gtest_run.py
new file mode 100644
index 0000000..753556d
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_gtest_run.py
@@ -0,0 +1,891 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import collections
+import itertools
+import logging
+import math
+import os
+import posixpath
+import subprocess
+import shutil
+import time
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil import base_error
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import logcat_monitor
+from devil.android import ports
+from devil.android.sdk import version_codes
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+from pylib.local import local_test_server_spawner
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.utils import google_storage_helper
+from pylib.utils import logdog_helper
+from py_trace_event import trace_event
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+_MAX_INLINE_FLAGS_LENGTH = 50  # Arbitrarily chosen.
+_EXTRA_COMMAND_LINE_FILE = (
+    'org.chromium.native_test.NativeTest.CommandLineFile')
+_EXTRA_COMMAND_LINE_FLAGS = (
+    'org.chromium.native_test.NativeTest.CommandLineFlags')
+_EXTRA_COVERAGE_DEVICE_FILE = (
+    'org.chromium.native_test.NativeTest.CoverageDeviceFile')
+_EXTRA_STDOUT_FILE = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.StdoutFile')
+_EXTRA_TEST = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.Test')
+_EXTRA_TEST_LIST = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.TestList')
+
+_SECONDS_TO_NANOS = int(1e9)
+
+# Tests that use SpawnedTestServer must run the LocalTestServerSpawner on the
+# host machine.
+# TODO(jbudorick): Move this up to the test instance if the net test server is
+# handled outside of the APK for the remote_device environment.
+_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [
+  'components_browsertests', 'content_unittests', 'content_browsertests',
+  'net_unittests', 'services_unittests', 'unit_tests'
+]
+
+# These are used for code coverage.
+_LLVM_PROFDATA_PATH = os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                                   'llvm-build', 'Release+Asserts', 'bin',
+                                   'llvm-profdata')
+# Name of the file extension for profraw data files.
+_PROFRAW_FILE_EXTENSION = 'profraw'
+# Name of the file where profraw data files are merged.
+_MERGE_PROFDATA_FILE_NAME = 'coverage_merged.' + _PROFRAW_FILE_EXTENSION
+
+# No-op context manager. If we used Python 3, we could change this to
+# contextlib.ExitStack()
+class _NullContextManager(object):
+  def __enter__(self):
+    pass
+  def __exit__(self, *args):
+    pass
+
+
+def _GenerateSequentialFileNames(filename):
+  """Infinite generator of names: 'name.ext', 'name_1.ext', 'name_2.ext', ..."""
+  yield filename
+  base, ext = os.path.splitext(filename)
+  for i in itertools.count(1):
+    yield '%s_%d%s' % (base, i, ext)
+
+
+def _ExtractTestsFromFilter(gtest_filter):
+  """Returns the list of tests specified by the given filter.
+
+  Returns:
+    None if the device should be queried for the test list instead.
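+
+  Examples:
+    'Foo.A:Foo.B'  -> ['Foo.A', 'Foo.B']
+    'Foo.Prefix*'  -> ['Foo.Prefix*']  (trailing wildcard with a '.' is ok)
+    '-Foo.A'       -> None  (exclude filter)
+    'Broad*'       -> None  (wildcard without a '.')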
+  """
+  # An empty filter means all tests; a '-' indicates an exclude filter.
+  if not gtest_filter or '-' in gtest_filter:
+    return None
+
+  patterns = gtest_filter.split(':')
+  # For a single pattern, allow it even if it has a wildcard, so long as the
+  # wildcard comes at the end and there is at least one '.' to show that the
+  # scope is not too broad.
+  # This heuristic is not guaranteed to be faster than querying the device,
+  # but it normally is.
+  if len(patterns) == 1 and patterns[0].endswith('*'):
+    no_suffix = patterns[0].rstrip('*')
+    if '*' not in no_suffix and '.' in no_suffix:
+      return patterns
+
+  if '*' in gtest_filter:
+    return None
+  return patterns
+
+
+def _GetDeviceTimeoutMultiplier():
+  # Emulated devices typically run 20-150x slower than real-time.
+  # Give a way to control this through the DEVICE_TIMEOUT_MULTIPLIER
+  # environment variable.
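+  # e.g. DEVICE_TIMEOUT_MULTIPLIER=20 turns a 30 second timeout into 600.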
+  multiplier = os.getenv("DEVICE_TIMEOUT_MULTIPLIER")
+  if multiplier:
+    return int(multiplier)
+  return 1
+
+
+def _MergeCoverageFiles(coverage_dir, profdata_dir):
+  """Merge coverage data files.
+
+  Each instrumentation activity generates a separate profraw data file. This
+  merges all profraw files in profdata_dir into a single file in
+  coverage_dir. This happens after each test, rather than waiting until after
+  all tests have run, to reduce the disk footprint of the accumulated profraw
+  files.
+
+  Args:
+    coverage_dir: The path to the coverage directory.
+    profdata_dir: The directory where the profraw data file(s) are located.
+
+  Return:
+    None
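+
+  The merge is roughly equivalent to running:
+    llvm-profdata merge -o coverage_merged.profraw -sparse=true \
+        [coverage_merged.profraw] <new .profraw files>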
+  """
+  # profdata_dir may not exist if pulling coverage files failed.
+  if not os.path.exists(profdata_dir):
+    logging.debug('Profraw directory does not exist.')
+    return
+
+  merge_file = os.path.join(coverage_dir, _MERGE_PROFDATA_FILE_NAME)
+  profraw_files = [
+      os.path.join(profdata_dir, f) for f in os.listdir(profdata_dir)
+      if f.endswith(_PROFRAW_FILE_EXTENSION)
+  ]
+
+  try:
+    logging.debug('Merging target profraw files into merged profraw file.')
+    subprocess_cmd = [
+        _LLVM_PROFDATA_PATH,
+        'merge',
+        '-o',
+        merge_file,
+        '-sparse=true',
+    ]
+    # Grow the merge file by merging it with itself and the new files.
+    if os.path.exists(merge_file):
+      subprocess_cmd.append(merge_file)
+    subprocess_cmd.extend(profraw_files)
+    output = subprocess.check_output(subprocess_cmd)
+    logging.debug('Merge output: %s', output)
+  except subprocess.CalledProcessError:
+    # Don't raise error as that will kill the test run. When code coverage
+    # generates a report, that will raise the error in the report generation.
+    logging.error(
+        'Failed to merge target profdata files to create merged profraw file.')
+
+  # Free up disk space on the bot, since all of the data is now in the merge
+  # file.
+  for f in profraw_files:
+    os.remove(f)
+
+
+def _PullCoverageFiles(device, device_coverage_dir, output_dir):
+  """Pulls coverage files on device to host directory.
+
+  Args:
+    device: The working device.
+    device_coverage_dir: The directory to store coverage data on device.
+    output_dir: The output directory on host.
+  """
+  try:
+    if not os.path.exists(output_dir):
+      os.makedirs(output_dir)
+    device.PullFile(device_coverage_dir, output_dir)
+    if not os.listdir(os.path.join(output_dir, 'profraw')):
+      logging.warning('No coverage data was generated for this run')
+  except (OSError, base_error.BaseError) as e:
+    logging.warning('Failed to handle coverage data after tests: %s', e)
+  finally:
+    device.RemovePath(device_coverage_dir, force=True, recursive=True)
+
+
+def _GetDeviceCoverageDir(device):
+  """Gets the directory to generate coverage data on device.
+
+  Args:
+    device: The working device.
+
+  Returns:
+    The directory path on the device.
+  """
+  return posixpath.join(device.GetExternalStoragePath(), 'chrome', 'test',
+                        'coverage', 'profraw')
+
+
+def _GetLLVMProfilePath(device_coverage_dir, suite, coverage_index):
+  """Gets 'LLVM_PROFILE_FILE' environment variable path.
+
+  Dumping data to only one file may cause warnings and data overwrites in
+  browsertests, so the pattern "%2m" is used to expand to 2 raw profiles at
+  runtime.
+
+  Args:
+    device_coverage_dir: The directory to generate data on device.
+    suite: Test suite name.
+    coverage_index: The incremental index for this test suite.
+
+  Returns:
+    The path pattern for environment variable 'LLVM_PROFILE_FILE'.
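+
+  Example:
+    _GetLLVMProfilePath('test_dir', 'base_unittests', 5) returns
+    'test_dir/base_unittests_5_%2m.profraw'.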
+  """
+  return posixpath.join(device_coverage_dir,
+                        '_'.join([suite,
+                                  str(coverage_index), '%2m.profraw']))
+
+
+class _ApkDelegate(object):
+  def __init__(self, test_instance, tool):
+    self._activity = test_instance.activity
+    self._apk_helper = test_instance.apk_helper
+    self._test_apk_incremental_install_json = (
+        test_instance.test_apk_incremental_install_json)
+    self._package = test_instance.package
+    self._runner = test_instance.runner
+    self._permissions = test_instance.permissions
+    self._suite = test_instance.suite
+    self._component = '%s/%s' % (self._package, self._runner)
+    self._extras = test_instance.extras
+    self._wait_for_java_debugger = test_instance.wait_for_java_debugger
+    self._tool = tool
+    self._coverage_dir = test_instance.coverage_dir
+    self._coverage_index = 0
+
+  def GetTestDataRoot(self, device):
+    # pylint: disable=no-self-use
+    return posixpath.join(device.GetExternalStoragePath(),
+                          'chromium_tests_root')
+
+  def Install(self, device):
+    if self._test_apk_incremental_install_json:
+      installer.Install(device, self._test_apk_incremental_install_json,
+                        apk=self._apk_helper, permissions=self._permissions)
+    else:
+      device.Install(
+          self._apk_helper,
+          allow_downgrade=True,
+          reinstall=True,
+          permissions=self._permissions)
+
+  def ResultsDirectory(self, device):
+    return device.GetApplicationDataDirectory(self._package)
+
+  def Run(self, test, device, flags=None, **kwargs):
+    extras = dict(self._extras)
+    device_api = device.build_version_sdk
+
+    if self._coverage_dir and device_api >= version_codes.LOLLIPOP:
+      device_coverage_dir = _GetDeviceCoverageDir(device)
+      extras[_EXTRA_COVERAGE_DEVICE_FILE] = _GetLLVMProfilePath(
+          device_coverage_dir, self._suite, self._coverage_index)
+      self._coverage_index += 1
+
+    if ('timeout' in kwargs
+        and gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in extras):
+      # Make sure the instrumentation doesn't kill the test before the
+      # scripts do. The provided timeout value is in seconds, but the
+      # instrumentation deals with nanoseconds because that's how Android
+      # handles time.
+      extras[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int(
+          kwargs['timeout'] * _SECONDS_TO_NANOS)
+
+    # pylint: disable=redefined-variable-type
+    command_line_file = _NullContextManager()
+    if flags:
+      if len(flags) > _MAX_INLINE_FLAGS_LENGTH:
+        command_line_file = device_temp_file.DeviceTempFile(device.adb)
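+        # The first token of a command-line file is the program name and is
+        # ignored by the reader, hence the '_' placeholder before the flags.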
+        device.WriteFile(command_line_file.name, '_ %s' % flags)
+        extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name
+      else:
+        extras[_EXTRA_COMMAND_LINE_FLAGS] = flags
+
+    test_list_file = _NullContextManager()
+    if test:
+      if len(test) > 1:
+        test_list_file = device_temp_file.DeviceTempFile(device.adb)
+        device.WriteFile(test_list_file.name, '\n'.join(test))
+        extras[_EXTRA_TEST_LIST] = test_list_file.name
+      else:
+        extras[_EXTRA_TEST] = test[0]
+    # pylint: enable=redefined-variable-type
+
+    # We need to use GetAppWritablePath here instead of GetExternalStoragePath
+    # since we will not have yet applied legacy storage permission workarounds
+    # on R+.
+    stdout_file = device_temp_file.DeviceTempFile(
+        device.adb, dir=device.GetAppWritablePath(), suffix='.gtest_out')
+    extras[_EXTRA_STDOUT_FILE] = stdout_file.name
+
+    if self._wait_for_java_debugger:
+      cmd = ['am', 'set-debug-app', '-w', self._package]
+      device.RunShellCommand(cmd, check_return=True)
+      logging.warning('*' * 80)
+      logging.warning('Waiting for debugger to attach to process: %s',
+                      self._package)
+      logging.warning('*' * 80)
+
+    with command_line_file, test_list_file, stdout_file:
+      try:
+        device.StartInstrumentation(
+            self._component, extras=extras, raw=False, **kwargs)
+      except device_errors.CommandFailedError:
+        logging.exception('gtest shard failed.')
+      except device_errors.CommandTimeoutError:
+        logging.exception('gtest shard timed out.')
+      except device_errors.DeviceUnreachableError:
+        logging.exception('gtest shard device unreachable.')
+      except Exception:
+        device.ForceStop(self._package)
+        raise
+      finally:
+        if self._coverage_dir and device_api >= version_codes.LOLLIPOP:
+          if not os.path.isdir(self._coverage_dir):
+            os.makedirs(self._coverage_dir)
+          # TODO(crbug.com/1179004): Use _MergeCoverageFiles once the
+          # "llvm-profdata not found" issue is fixed.
+          _PullCoverageFiles(
+              device, device_coverage_dir,
+              os.path.join(self._coverage_dir, str(self._coverage_index)))
+
+      return device.ReadFile(stdout_file.name).splitlines()
+
+  def PullAppFiles(self, device, files, directory):
+    device_dir = device.GetApplicationDataDirectory(self._package)
+    host_dir = os.path.join(directory, str(device))
+    for f in files:
+      device_file = posixpath.join(device_dir, f)
+      host_file = os.path.join(host_dir, *f.split(posixpath.sep))
+      for host_file in _GenerateSequentialFileNames(host_file):
+        if not os.path.exists(host_file):
+          break
+      device.PullFile(device_file, host_file)
+
+  def Clear(self, device):
+    device.ClearApplicationState(self._package, permissions=self._permissions)
+
+
+class _ExeDelegate(object):
+
+  def __init__(self, tr, test_instance, tool):
+    self._host_dist_dir = test_instance.exe_dist_dir
+    self._exe_file_name = os.path.basename(
+        test_instance.exe_dist_dir)[:-len('__dist')]
+    self._device_dist_dir = posixpath.join(
+        constants.TEST_EXECUTABLE_DIR,
+        os.path.basename(test_instance.exe_dist_dir))
+    self._test_run = tr
+    self._tool = tool
+    self._suite = test_instance.suite
+    self._coverage_dir = test_instance.coverage_dir
+    self._coverage_index = 0
+
+  def GetTestDataRoot(self, device):
+    # pylint: disable=no-self-use
+    # pylint: disable=unused-argument
+    return posixpath.join(constants.TEST_EXECUTABLE_DIR, 'chromium_tests_root')
+
+  def Install(self, device):
+    # TODO(jbudorick): Look into merging this with normal data deps pushing if
+    # executables become supported on nonlocal environments.
+    device.PushChangedFiles([(self._host_dist_dir, self._device_dist_dir)],
+                            delete_device_stale=True)
+
+  def ResultsDirectory(self, device):
+    # pylint: disable=no-self-use
+    # pylint: disable=unused-argument
+    return constants.TEST_EXECUTABLE_DIR
+
+  def Run(self, test, device, flags=None, **kwargs):
+    tool = self._test_run.GetTool(device).GetTestWrapper()
+    if tool:
+      cmd = [tool]
+    else:
+      cmd = []
+    cmd.append(posixpath.join(self._device_dist_dir, self._exe_file_name))
+
+    if test:
+      cmd.append('--gtest_filter=%s' % ':'.join(test))
+    if flags:
+      # TODO(agrieve): This won't work if multiple flags are passed.
+      cmd.append(flags)
+    cwd = constants.TEST_EXECUTABLE_DIR
+
+    env = {
+      'LD_LIBRARY_PATH': self._device_dist_dir
+    }
+
+    if self._coverage_dir:
+      device_coverage_dir = _GetDeviceCoverageDir(device)
+      env['LLVM_PROFILE_FILE'] = _GetLLVMProfilePath(
+          device_coverage_dir, self._suite, self._coverage_index)
+      self._coverage_index += 1
+
+    if self._tool != 'asan':
+      env['UBSAN_OPTIONS'] = constants.UBSAN_OPTIONS
+
+    try:
+      gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+      external = device.GetExternalStoragePath()
+      env['GCOV_PREFIX'] = '%s/gcov' % external
+      env['GCOV_PREFIX_STRIP'] = gcov_strip_depth
+    except (device_errors.CommandFailedError, KeyError):
+      pass
+
+    # Executable tests return a nonzero exit code on test failure, which is
+    # fine from the test runner's perspective; thus check_return=False.
+    output = device.RunShellCommand(
+        cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs)
+
+    if self._coverage_dir:
+      _PullCoverageFiles(
+          device, device_coverage_dir,
+          os.path.join(self._coverage_dir, str(self._coverage_index)))
+
+    return output
+
+  def PullAppFiles(self, device, files, directory):
+    pass
+
+  def Clear(self, device):
+    device.KillAll(self._exe_file_name,
+                   blocking=True,
+                   timeout=30 * _GetDeviceTimeoutMultiplier(),
+                   quiet=True)
+
+
+class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
+
+  def __init__(self, env, test_instance):
+    assert isinstance(env, local_device_environment.LocalDeviceEnvironment)
+    assert isinstance(test_instance, gtest_test_instance.GtestTestInstance)
+    super(LocalDeviceGtestRun, self).__init__(env, test_instance)
+
+    if self._test_instance.apk_helper:
+      self._installed_packages = [
+          self._test_instance.apk_helper.GetPackageName()
+      ]
+
+    # pylint: disable=redefined-variable-type
+    if self._test_instance.apk:
+      self._delegate = _ApkDelegate(self._test_instance, env.tool)
+    elif self._test_instance.exe_dist_dir:
+      self._delegate = _ExeDelegate(self, self._test_instance, self._env.tool)
+    if self._test_instance.isolated_script_test_perf_output:
+      self._test_perf_output_filenames = _GenerateSequentialFileNames(
+          self._test_instance.isolated_script_test_perf_output)
+    else:
+      self._test_perf_output_filenames = itertools.repeat(None)
+    # pylint: enable=redefined-variable-type
+    self._crashes = set()
+    self._servers = collections.defaultdict(list)
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    @local_device_environment.handle_shard_failures_with(
+        on_failure=self._env.DenylistDevice)
+    @trace_event.traced
+    def individual_device_set_up(device, host_device_tuples):
+      def install_apk(dev):
+        # Install test APK.
+        self._delegate.Install(dev)
+
+      def push_test_data(dev):
+        # Push data dependencies.
+        device_root = self._delegate.GetTestDataRoot(dev)
+        host_device_tuples_substituted = [
+            (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+            for h, d in host_device_tuples]
+        local_device_environment.place_nomedia_on_device(dev, device_root)
+        dev.PushChangedFiles(
+            host_device_tuples_substituted,
+            delete_device_stale=True,
+            # Some gtest suites, e.g. unit_tests, have data dependencies that
+            # can take longer than the default timeout to push. See
+            # crbug.com/791632 for context.
+            timeout=600 * math.ceil(_GetDeviceTimeoutMultiplier() / 10))
+        if not host_device_tuples:
+          dev.RemovePath(device_root, force=True, recursive=True, rename=True)
+          dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+      def init_tool_and_start_servers(dev):
+        tool = self.GetTool(dev)
+        tool.CopyFiles(dev)
+        tool.SetupEnvironment()
+
+        try:
+          # See https://crbug.com/1030827.
+          # This is a hack that may break in the future. We're relying on the
+          # fact that adb doesn't use ipv6 for its server, and so doesn't
+          # listen on ipv6, but ssh remote forwarding does. 5037 is the port
+          # number adb uses for its server.
+          if "[::1]:5037" in subprocess.check_output(
+              "ss -o state listening 'sport = 5037'", shell=True):
+            logging.error(
+                'Test Server cannot be started with a remote-forwarded adb '
+                'server. Continuing anyway, but some tests may fail.')
+            return
+        except subprocess.CalledProcessError:
+          pass
+
+        self._servers[str(dev)] = []
+        if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER:
+          self._servers[str(dev)].append(
+              local_test_server_spawner.LocalTestServerSpawner(
+                  ports.AllocateTestServerPort(), dev, tool))
+
+        for s in self._servers[str(dev)]:
+          s.SetUp()
+
+      def bind_crash_handler(step, dev):
+        return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+      # Explicitly enable root to ensure that tests run under deterministic
+      # conditions. Without this explicit call, EnableRoot() is called from
+      # push_test_data() when PushChangedFiles() determines that it should use
+      # _PushChangedFilesZipped(), which is only most of the time.
+      # Root is required (amongst maybe other reasons) to pull the results file
+      # from the device, since it lives within the application's data directory
+      # (via GetApplicationDataDirectory()).
+      device.EnableRoot()
+
+      steps = [
+          bind_crash_handler(s, device)
+          for s in (install_apk, push_test_data, init_tool_and_start_servers)]
+      if self._env.concurrent_adb:
+        reraiser_thread.RunAsync(steps)
+      else:
+        for step in steps:
+          step()
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  #override
+  def _CreateShards(self, tests):
+    # _crashes are tests that might crash, preventing the tests that follow
+    # them in the same shard from running.
+    # Thus we need to create a separate shard for each crashed testcase,
+    # so that the other tests can still be run.
+    device_count = len(self._env.devices)
+    shards = []
+
+    # Add shards with only one suspect testcase.
+    shards += [[crash] for crash in self._crashes if crash in tests]
+
+    # Delete suspect testcase from tests.
+    tests = [test for test in tests if test not in self._crashes]
+
+    max_shard_size = self._test_instance.test_launcher_batch_limit
+
+    shards.extend(self._PartitionTests(tests, device_count, max_shard_size))
+    return shards
+
+  #override
+  def _GetTests(self):
+    if self._test_instance.extract_test_list_from_filter:
+      # When the exact list of tests to run is given via command-line (e.g. when
+      # locally iterating on a specific test), skip querying the device (which
+      # takes ~3 seconds).
+      tests = _ExtractTestsFromFilter(self._test_instance.gtest_filter)
+      if tests:
+        return tests
+
+    # Even when there's only one device, it still makes sense to retrieve the
+    # test list so that tests can be split up and run in batches rather than all
+    # at once (since test output is not streamed).
+    @local_device_environment.handle_shard_failures_with(
+        on_failure=self._env.DenylistDevice)
+    def list_tests(dev):
+      timeout = 30 * _GetDeviceTimeoutMultiplier()
+      retries = 1
+      if self._test_instance.wait_for_java_debugger:
+        timeout = None
+
+      flags = [
+          f for f in self._test_instance.flags
+          if f not in ['--wait-for-debugger', '--wait-for-java-debugger']
+      ]
+      flags.append('--gtest_list_tests')
+
+      # TODO(crbug.com/726880): Remove retries when no longer necessary.
+      for i in range(0, retries+1):
+        logging.info('flags:')
+        for f in flags:
+          logging.info('  %s', f)
+
+        with self._ArchiveLogcat(dev, 'list_tests'):
+          raw_test_list = crash_handler.RetryOnSystemCrash(
+              lambda d: self._delegate.Run(
+                  None, d, flags=' '.join(flags), timeout=timeout),
+              device=dev)
+
+        tests = gtest_test_instance.ParseGTestListTests(raw_test_list)
+        if not tests:
+          logging.info('No tests found. Output:')
+          for l in raw_test_list:
+            logging.info('  %s', l)
+          if i < retries:
+            logging.info('Retrying...')
+        else:
+          break
+      return tests
+
+    # Query all devices in case one fails.
+    test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+    # If all devices failed to list tests, raise an exception.
+    # Check that tl is not None and is not empty.
+    if all(not tl for tl in test_lists):
+      raise device_errors.CommandFailedError(
+          'Failed to list tests on any device')
+    tests = list(sorted(set().union(*[set(tl) for tl in test_lists if tl])))
+    tests = self._test_instance.FilterTests(tests)
+    tests = self._ApplyExternalSharding(
+        tests, self._test_instance.external_shard_index,
+        self._test_instance.total_external_shards)
+    return tests
+
+  def _UploadTestArtifacts(self, device, test_artifacts_dir):
+    # TODO(jbudorick): Reconcile this with the output manager once
+    # https://codereview.chromium.org/2933993002/ lands.
+    if test_artifacts_dir:
+      with tempfile_ext.NamedTemporaryDirectory() as test_artifacts_host_dir:
+        device.PullFile(test_artifacts_dir.name, test_artifacts_host_dir)
+        with tempfile_ext.NamedTemporaryDirectory() as temp_zip_dir:
+          zip_base_name = os.path.join(temp_zip_dir, 'test_artifacts')
+          test_artifacts_zip = shutil.make_archive(
+              zip_base_name, 'zip', test_artifacts_host_dir)
+          link = google_storage_helper.upload(
+              google_storage_helper.unique_name(
+                  'test_artifacts', device=device),
+              test_artifacts_zip,
+              bucket='%s/test_artifacts' % (
+                  self._test_instance.gs_test_artifacts_bucket))
+          logging.info('Uploading test artifacts to %s.', link)
+          return link
+    return None
+
+  def _PullRenderTestOutput(self, device, render_test_output_device_dir):
+    # We pull the render tests into a temp directory then copy them over
+    # individually. Otherwise we end up with a temporary directory name
+    # in the host output directory.
+    with tempfile_ext.NamedTemporaryDirectory() as tmp_host_dir:
+      try:
+        device.PullFile(render_test_output_device_dir, tmp_host_dir)
+      except device_errors.CommandFailedError:
+        logging.exception('Failed to pull render test output dir %s',
+                          render_test_output_device_dir)
+      temp_host_dir = os.path.join(
+          tmp_host_dir, os.path.basename(render_test_output_device_dir))
+      for output_file in os.listdir(temp_host_dir):
+        src_path = os.path.join(temp_host_dir, output_file)
+        dst_path = os.path.join(self._test_instance.render_test_output_dir,
+                                output_file)
+        shutil.move(src_path, dst_path)
+
+  @contextlib.contextmanager
+  def _ArchiveLogcat(self, device, test):
+    if isinstance(test, str):
+      desc = test
+    else:
+      desc = hash(tuple(test))
+
+    stream_name = 'logcat_%s_shard%s_%s_%s' % (
+        desc, self._test_instance.external_shard_index,
+        time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial)
+
+    logcat_file = None
+    logmon = None
+    try:
+      with self._env.output_manager.ArchivedTempfile(stream_name,
+                                                     'logcat') as logcat_file:
+        with logcat_monitor.LogcatMonitor(
+            device.adb,
+            filter_specs=local_device_environment.LOGCAT_FILTERS,
+            output_file=logcat_file.name,
+            check_error=False) as logmon:
+          with contextlib_ext.Optional(trace_event.trace(str(test)),
+                                       self._env.trace_output):
+            yield logcat_file
+    finally:
+      if logmon:
+        logmon.Close()
+      if logcat_file and logcat_file.Link():
+        logging.info('Logcat saved to %s', logcat_file.Link())
+
+  #override
+  def _RunTest(self, device, test):
+    # Run the test.
+    timeout = (self._test_instance.shard_timeout *
+               self.GetTool(device).GetTimeoutScale() *
+               _GetDeviceTimeoutMultiplier())
+    if self._test_instance.wait_for_java_debugger:
+      timeout = None
+    if self._test_instance.store_tombstones:
+      tombstones.ClearAllTombstones(device)
+    test_perf_output_filename = next(self._test_perf_output_filenames)
+
+    if self._test_instance.isolated_script_test_output:
+      suffix = '.json'
+    else:
+      suffix = '.xml'
+
+    with device_temp_file.DeviceTempFile(
+        adb=device.adb,
+        dir=self._delegate.ResultsDirectory(device),
+        suffix=suffix) as device_tmp_results_file:
+      with contextlib_ext.Optional(
+          device_temp_file.NamedDeviceTemporaryDirectory(
+              adb=device.adb, dir='/sdcard/'),
+          self._test_instance.gs_test_artifacts_bucket) as test_artifacts_dir:
+        with (contextlib_ext.Optional(
+            device_temp_file.DeviceTempFile(
+                adb=device.adb, dir=self._delegate.ResultsDirectory(device)),
+            test_perf_output_filename)) as isolated_script_test_perf_output:
+          with contextlib_ext.Optional(
+              device_temp_file.NamedDeviceTemporaryDirectory(adb=device.adb,
+                                                             dir='/sdcard/'),
+              self._test_instance.render_test_output_dir
+          ) as render_test_output_dir:
+
+            flags = list(self._test_instance.flags)
+            if self._test_instance.enable_xml_result_parsing:
+              flags.append('--gtest_output=xml:%s' %
+                           device_tmp_results_file.name)
+
+            if self._test_instance.gs_test_artifacts_bucket:
+              flags.append('--test_artifacts_dir=%s' % test_artifacts_dir.name)
+
+            if self._test_instance.isolated_script_test_output:
+              flags.append('--isolated-script-test-output=%s' %
+                           device_tmp_results_file.name)
+
+            if test_perf_output_filename:
+              flags.append('--isolated_script_test_perf_output=%s' %
+                           isolated_script_test_perf_output.name)
+
+            if self._test_instance.render_test_output_dir:
+              flags.append('--render-test-output-dir=%s' %
+                           render_test_output_dir.name)
+
+            logging.info('flags:')
+            for f in flags:
+              logging.info('  %s', f)
+
+            with self._ArchiveLogcat(device, test) as logcat_file:
+              output = self._delegate.Run(test,
+                                          device,
+                                          flags=' '.join(flags),
+                                          timeout=timeout,
+                                          retries=0)
+
+            if self._test_instance.enable_xml_result_parsing:
+              try:
+                gtest_xml = device.ReadFile(device_tmp_results_file.name)
+              except device_errors.CommandFailedError:
+                logging.exception('Failed to pull gtest results XML file %s',
+                                  device_tmp_results_file.name)
+                gtest_xml = None
+
+            if self._test_instance.isolated_script_test_output:
+              try:
+                gtest_json = device.ReadFile(device_tmp_results_file.name)
+              except device_errors.CommandFailedError:
+                logging.exception('Failed to pull gtest results JSON file %s',
+                                  device_tmp_results_file.name)
+                gtest_json = None
+
+            if test_perf_output_filename:
+              try:
+                device.PullFile(isolated_script_test_perf_output.name,
+                                test_perf_output_filename)
+              except device_errors.CommandFailedError:
+                logging.exception('Failed to pull chartjson results %s',
+                                  isolated_script_test_perf_output.name)
+
+            test_artifacts_url = self._UploadTestArtifacts(
+                device, test_artifacts_dir)
+
+            if render_test_output_dir:
+              self._PullRenderTestOutput(device, render_test_output_dir.name)
+
+    for s in self._servers[str(device)]:
+      s.Reset()
+    if self._test_instance.app_files:
+      self._delegate.PullAppFiles(device, self._test_instance.app_files,
+                                  self._test_instance.app_file_dir)
+    if not self._env.skip_clear_data:
+      self._delegate.Clear(device)
+
+    for l in output:
+      logging.info(l)
+
+    # Parse the output.
+    # TODO(jbudorick): Transition test scripts away from parsing stdout.
+    if self._test_instance.enable_xml_result_parsing:
+      results = gtest_test_instance.ParseGTestXML(gtest_xml)
+    elif self._test_instance.isolated_script_test_output:
+      results = gtest_test_instance.ParseGTestJSON(gtest_json)
+    else:
+      results = gtest_test_instance.ParseGTestOutput(
+          output, self._test_instance.symbolizer, device.product_cpu_abi)
+
+    tombstones_url = None
+    for r in results:
+      if logcat_file:
+        r.SetLink('logcat', logcat_file.Link())
+
+      if self._test_instance.gs_test_artifacts_bucket:
+        r.SetLink('test_artifacts', test_artifacts_url)
+
+      if r.GetType() == base_test_result.ResultType.CRASH:
+        self._crashes.add(r.GetName())
+        if self._test_instance.store_tombstones:
+          if not tombstones_url:
+            resolved_tombstones = tombstones.ResolveTombstones(
+                device,
+                resolve_all_tombstones=True,
+                include_stack_symbols=False,
+                wipe_tombstones=True)
+            stream_name = 'tombstones_%s_%s' % (
+                time.strftime('%Y%m%dT%H%M%S', time.localtime()),
+                device.serial)
+            tombstones_url = logdog_helper.text(
+                stream_name, '\n'.join(resolved_tombstones))
+          r.SetLink('tombstones', tombstones_url)
+
+    tests_stripped_disabled_prefix = set()
+    for t in test:
+      tests_stripped_disabled_prefix.add(
+          gtest_test_instance.TestNameWithoutDisabledPrefix(t))
+    not_run_tests = tests_stripped_disabled_prefix.difference(
+        set(r.GetName() for r in results))
+    return results, list(not_run_tests) if results else None
+
+  #override
+  def TearDown(self):
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    @local_device_environment.handle_shard_failures
+    @trace_event.traced
+    def individual_device_tear_down(dev):
+      for s in self._servers.get(str(dev), []):
+        s.TearDown()
+
+      tool = self.GetTool(dev)
+      tool.CleanUpEnvironment()
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
diff --git a/src/build/android/pylib/local/device/local_device_gtest_run_test.py b/src/build/android/pylib/local/device/local_device_gtest_run_test.py
new file mode 100755
index 0000000..b08b24b
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_gtest_run_test.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env vpython
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for local_device_gtest_test_run."""
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import os
+import tempfile
+import unittest
+
+from pylib.gtest import gtest_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_gtest_run
+from py_utils import tempfile_ext
+
+import mock  # pylint: disable=import-error
+
+
+class LocalDeviceGtestRunTest(unittest.TestCase):
+  def setUp(self):
+    self._obj = local_device_gtest_run.LocalDeviceGtestRun(
+        mock.MagicMock(spec=local_device_environment.LocalDeviceEnvironment),
+        mock.MagicMock(spec=gtest_test_instance.GtestTestInstance))
+
+  def testExtractTestsFromFilter(self):
+    # Checks splitting by colons.
+    self.assertEqual([
+        'b17',
+        'm4e3',
+        'p51',
+    ], local_device_gtest_run._ExtractTestsFromFilter('b17:m4e3:p51'))
+    # Checks the '-' sign.
+    self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('-mk2'))
+    # Checks more than one asterisk.
+    self.assertIsNone(
+        local_device_gtest_run._ExtractTestsFromFilter('.mk2*:.M67*'))
+    # Checks a single asterisk without a period.
+    self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('M67*'))
+    # Checks an asterisk at the end with a period.
+    self.assertEqual(['.M67*'],
+                     local_device_gtest_run._ExtractTestsFromFilter('.M67*'))
+
+  def testGetLLVMProfilePath(self):
+    path = local_device_gtest_run._GetLLVMProfilePath('test_dir', 'sr71', '5')
+    self.assertEqual(path, os.path.join('test_dir', 'sr71_5_%2m.profraw'))
+
+  @mock.patch('subprocess.check_output')
+  def testMergeCoverageFiles(self, mock_sub):
+    with tempfile_ext.NamedTemporaryDirectory() as cov_tempd:
+      pro_tempd = os.path.join(cov_tempd, 'profraw')
+      os.mkdir(pro_tempd)
+      profdata = tempfile.NamedTemporaryFile(
+          dir=pro_tempd,
+          delete=False,
+          suffix=local_device_gtest_run._PROFRAW_FILE_EXTENSION)
+      local_device_gtest_run._MergeCoverageFiles(cov_tempd, pro_tempd)
+      # Merged file should be deleted.
+      self.assertFalse(os.path.exists(profdata.name))
+      self.assertTrue(mock_sub.called)
+
+  @mock.patch('pylib.utils.google_storage_helper.upload')
+  def testUploadTestArtifacts(self, mock_gsh):
+    link = self._obj._UploadTestArtifacts(mock.MagicMock(), None)
+    self.assertFalse(mock_gsh.called)
+    self.assertIsNone(link)
+
+    result = 'A/10/warthog/path'
+    mock_gsh.return_value = result
+    with tempfile_ext.NamedTemporaryFile() as temp_f:
+      link = self._obj._UploadTestArtifacts(mock.MagicMock(), temp_f)
+    self.assertTrue(mock_gsh.called)
+    self.assertEqual(result, link)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/src/build/android/pylib/local/device/local_device_instrumentation_test_run.py
new file mode 100644
index 0000000..7f16d6a
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -0,0 +1,1471 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import contextlib
+import copy
+import hashlib
+import json
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import time
+
+from devil import base_error
+from devil.android import apk_helper
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import flag_changer
+from devil.android.sdk import shared_prefs
+from devil.android import logcat_monitor
+from devil.android.tools import system_app
+from devil.android.tools import webview_app
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import output_manager
+from pylib.constants import host_paths
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.output import remote_output_manager
+from pylib.utils import chrome_proxy_utils
+from pylib.utils import gold_utils
+from pylib.utils import instrumentation_tracing
+from pylib.utils import shared_preference_utils
+from py_trace_event import trace_event
+from py_trace_event import trace_time
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+with host_paths.SysPath(
+    os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'), 0):
+  import jinja2  # pylint: disable=import-error
+  import markupsafe  # pylint: disable=import-error,unused-import
+
+
+_JINJA_TEMPLATE_DIR = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'instrumentation')
+_JINJA_TEMPLATE_FILENAME = 'render_test.html.jinja'
+
+_WPR_GO_LINUX_X86_64_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                         'third_party', 'webpagereplay', 'bin',
+                                         'linux', 'x86_64', 'wpr')
+
+_TAG = 'test_runner_py'
+
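+# (annotation, timeout in seconds) pairs. _GetTimeoutFromAnnotations walks
+# this list in order and uses the first annotation present on a test, so
+# longer-running categories take precedence.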
+TIMEOUT_ANNOTATIONS = [
+    ('Manual', 10 * 60 * 60),
+    ('IntegrationTest', 10 * 60),
+    ('External', 10 * 60),
+    ('EnormousTest', 5 * 60),
+    ('LargeTest', 2 * 60),
+    ('MediumTest', 30),
+    ('SmallTest', 10),
+]
+
+# Account for Instrumentation and process init overhead.
+FIXED_TEST_TIMEOUT_OVERHEAD = 60
+
+# 30 minute max timeout for an instrumentation invocation to avoid shard
+# timeouts when tests never finish. The shard timeout is currently 60 minutes,
+# so this needs to be less than that.
+MAX_BATCH_TEST_TIMEOUT = 30 * 60
+
+LOGCAT_FILTERS = ['*:e', 'chromium:v', 'cr_*:v', 'DEBUG:I',
+                  'StrictMode:D', '%s:I' % _TAG]
+
+EXTRA_SCREENSHOT_FILE = (
+    'org.chromium.base.test.ScreenshotOnFailureStatement.ScreenshotFile')
+
+EXTRA_UI_CAPTURE_DIR = (
+    'org.chromium.base.test.util.Screenshooter.ScreenshotDir')
+
+EXTRA_TRACE_FILE = ('org.chromium.base.test.BaseJUnit4ClassRunner.TraceFile')
+
+_EXTRA_TEST_LIST = (
+    'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.TestList')
+
+_EXTRA_PACKAGE_UNDER_TEST = ('org.chromium.chrome.test.pagecontroller.rules.'
+                             'ChromeUiApplicationTestRule.PackageUnderTest')
+
+FEATURE_ANNOTATION = 'Feature'
+RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest'
+WPR_ARCHIVE_FILE_PATH_ANNOTATION = 'WPRArchiveDirectory'
+WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION = 'WPRRecordReplayTest'
+
+_DEVICE_GOLD_DIR = 'skia_gold'
+# A map of Android product models to SDK ints.
+RENDER_TEST_MODEL_SDK_CONFIGS = {
+    # Android x86 emulator.
+    'Android SDK built for x86': [23],
+}
+
+_BATCH_SUFFIX = '_batch'
+_TEST_BATCH_MAX_GROUP_SIZE = 256
+
+
+@contextlib.contextmanager
+def _LogTestEndpoints(device, test_name):
+  device.RunShellCommand(
+      ['log', '-p', 'i', '-t', _TAG, 'START %s' % test_name],
+      check_return=True)
+  try:
+    yield
+  finally:
+    device.RunShellCommand(
+        ['log', '-p', 'i', '-t', _TAG, 'END %s' % test_name],
+        check_return=True)
+
+# TODO(jbudorick): Make this private once the instrumentation test_runner
+# is deprecated.
+def DidPackageCrashOnDevice(package_name, device):
+  # Dismiss any error dialogs. Limit the number in case we have an error
+  # loop or we are failing to dismiss.
+  try:
+    for _ in range(10):
+      package = device.DismissCrashDialogIfNeeded(timeout=10, retries=1)
+      if not package:
+        return False
+      # Assume test package convention of ".test" suffix
+      if package in package_name:
+        return True
+  except device_errors.CommandFailedError:
+    logging.exception('Error while attempting to dismiss crash dialog.')
+  return False
+
+
+_CURRENT_FOCUS_CRASH_RE = re.compile(
+    r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+
+def _GetTargetPackageName(test_apk):
+  # apk_under_test does not work for smoke tests, where it is set to an
+  # apk that is not listed as the targetPackage in the test apk's manifest.
+  return test_apk.GetAllInstrumentations()[0]['android:targetPackage']
+
+
+class LocalDeviceInstrumentationTestRun(
+    local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    super(LocalDeviceInstrumentationTestRun, self).__init__(
+        env, test_instance)
+    self._chrome_proxy = None
+    self._context_managers = collections.defaultdict(list)
+    self._flag_changers = {}
+    self._render_tests_device_output_dir = None
+    self._shared_prefs_to_restore = []
+    self._skia_gold_session_manager = None
+    self._skia_gold_work_dir = None
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    target_package = _GetTargetPackageName(self._test_instance.test_apk)
+
+    @local_device_environment.handle_shard_failures_with(
+        self._env.DenylistDevice)
+    @trace_event.traced
+    def individual_device_set_up(device, host_device_tuples):
+      steps = []
+
+      if self._test_instance.replace_system_package:
+        @trace_event.traced
+        def replace_package(dev):
+          # We need the context manager to be applied before modifying any
+          # shared preference files in case the replacement APK needs to be
+          # set up, and it needs to be applied while the test is running.
+          # Thus, it needs to be applied early during setup, but must still be
+          # applied during _RunTest, which isn't possible using 'with' without
+          # applying the context manager up in test_runner. Instead, we
+          # manually invoke its __enter__ and __exit__ methods in setup and
+          # teardown.
+          system_app_context = system_app.ReplaceSystemApp(
+              dev, self._test_instance.replace_system_package.package,
+              self._test_instance.replace_system_package.replacement_apk)
+          # Pylint is not smart enough to realize that this field has
+          # an __enter__ method, and will complain loudly.
+          # pylint: disable=no-member
+          system_app_context.__enter__()
+          # pylint: enable=no-member
+          self._context_managers[str(dev)].append(system_app_context)
+
+        steps.append(replace_package)
+
+      if self._test_instance.system_packages_to_remove:
+
+        @trace_event.traced
+        def remove_packages(dev):
+          logging.info('Attempting to remove system packages %s',
+                       self._test_instance.system_packages_to_remove)
+          system_app.RemoveSystemApps(
+              dev, self._test_instance.system_packages_to_remove)
+          logging.info('Done removing system packages')
+
+        # This should be at the front in case we're removing the package to make
+        # room for another APK installation later on. Since we disallow
+        # concurrent adb with this option specified, this should be safe.
+        steps.insert(0, remove_packages)
+
+      if self._test_instance.use_webview_provider:
+        @trace_event.traced
+        def use_webview_provider(dev):
+          # We need the context manager to be applied before modifying any
+          # shared preference files in case the replacement APK needs to be
+          # set up, and it needs to be applied while the test is running.
+          # Thus, it needs to be applied early during setup, but must still be
+          # applied during _RunTest, which isn't possible using 'with' without
+          # applying the context manager up in test_runner. Instead, we
+          # manually invoke its __enter__ and __exit__ methods in setup and
+          # teardown.
+          webview_context = webview_app.UseWebViewProvider(
+              dev, self._test_instance.use_webview_provider)
+          # Pylint is not smart enough to realize that this field has
+          # an __enter__ method, and will complain loudly.
+          # pylint: disable=no-member
+          webview_context.__enter__()
+          # pylint: enable=no-member
+          self._context_managers[str(dev)].append(webview_context)
+
+        steps.append(use_webview_provider)
+
+      def install_helper(apk,
+                         modules=None,
+                         fake_modules=None,
+                         permissions=None,
+                         additional_locales=None):
+
+        @instrumentation_tracing.no_tracing
+        @trace_event.traced
+        def install_helper_internal(d, apk_path=None):
+          # pylint: disable=unused-argument
+          d.Install(apk,
+                    modules=modules,
+                    fake_modules=fake_modules,
+                    permissions=permissions,
+                    additional_locales=additional_locales)
+
+        return install_helper_internal
+
+      def incremental_install_helper(apk, json_path, permissions):
+
+        @trace_event.traced
+        def incremental_install_helper_internal(d, apk_path=None):
+          # pylint: disable=unused-argument
+          installer.Install(d, json_path, apk=apk, permissions=permissions)
+        return incremental_install_helper_internal
+
+      permissions = self._test_instance.test_apk.GetPermissions()
+      if self._test_instance.test_apk_incremental_install_json:
+        steps.append(incremental_install_helper(
+                         self._test_instance.test_apk,
+                         self._test_instance.
+                             test_apk_incremental_install_json,
+                         permissions))
+      else:
+        steps.append(
+            install_helper(
+                self._test_instance.test_apk, permissions=permissions))
+
+      steps.extend(
+          install_helper(apk) for apk in self._test_instance.additional_apks)
+
+      # We'll potentially need the package names later for setting app
+      # compatibility workarounds.
+      for apk in (self._test_instance.additional_apks +
+                  [self._test_instance.test_apk]):
+        self._installed_packages.append(apk_helper.GetPackageName(apk))
+
+      # The apk under test needs to be installed last since installing other
+      # apks after will unintentionally clear the fake module directory.
+      # TODO(wnwen): Make this more robust, fix crbug.com/1010954.
+      if self._test_instance.apk_under_test:
+        self._installed_packages.append(
+            apk_helper.GetPackageName(self._test_instance.apk_under_test))
+        permissions = self._test_instance.apk_under_test.GetPermissions()
+        if self._test_instance.apk_under_test_incremental_install_json:
+          steps.append(
+              incremental_install_helper(
+                  self._test_instance.apk_under_test,
+                  self._test_instance.apk_under_test_incremental_install_json,
+                  permissions))
+        else:
+          steps.append(
+              install_helper(self._test_instance.apk_under_test,
+                             self._test_instance.modules,
+                             self._test_instance.fake_modules, permissions,
+                             self._test_instance.additional_locales))
+
+      @trace_event.traced
+      def set_debug_app(dev):
+        # Set debug app in order to enable reading command line flags on user
+        # builds.
+        cmd = ['am', 'set-debug-app', '--persistent']
+        if self._test_instance.wait_for_java_debugger:
+          cmd.append('-w')
+        cmd.append(target_package)
+        dev.RunShellCommand(cmd, check_return=True)
+
+      @trace_event.traced
+      def edit_shared_prefs(dev):
+        for setting in self._test_instance.edit_shared_prefs:
+          shared_pref = shared_prefs.SharedPrefs(
+              dev, setting['package'], setting['filename'],
+              use_encrypted_path=setting.get('supports_encrypted_path', False))
+          pref_to_restore = copy.copy(shared_pref)
+          pref_to_restore.Load()
+          self._shared_prefs_to_restore.append(pref_to_restore)
+
+          shared_preference_utils.ApplySharedPreferenceSetting(
+              shared_pref, setting)
+
+      @trace_event.traced
+      def set_vega_permissions(dev):
+        # Normally, installation of VrCore automatically grants storage
+        # permissions. However, since VrCore is part of the system image on
+        # the Vega standalone headset, we don't install the APK as part of test
+        # setup. Instead, grant the permissions here so that it can take
+        # screenshots.
+        if dev.product_name == 'vega':
+          dev.GrantPermissions('com.google.vr.vrcore', [
+              'android.permission.WRITE_EXTERNAL_STORAGE',
+              'android.permission.READ_EXTERNAL_STORAGE'
+          ])
+
+      @instrumentation_tracing.no_tracing
+      def push_test_data(dev):
+        device_root = posixpath.join(dev.GetExternalStoragePath(),
+                                     'chromium_tests_root')
+        host_device_tuples_substituted = [
+            (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+            for h, d in host_device_tuples]
+        logging.info('Pushing data dependencies.')
+        for h, d in host_device_tuples_substituted:
+          logging.debug('  %r -> %r', h, d)
+        local_device_environment.place_nomedia_on_device(dev, device_root)
+        dev.PushChangedFiles(host_device_tuples_substituted,
+                             delete_device_stale=True)
+        if not host_device_tuples_substituted:
+          dev.RunShellCommand(['rm', '-rf', device_root], check_return=True)
+          dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+      @trace_event.traced
+      def create_flag_changer(dev):
+        if self._test_instance.flags:
+          self._CreateFlagChangerIfNeeded(dev)
+          logging.debug('Attempting to set flags: %r',
+                        self._test_instance.flags)
+          self._flag_changers[str(dev)].AddFlags(self._test_instance.flags)
+
+        valgrind_tools.SetChromeTimeoutScale(
+            dev, self._test_instance.timeout_scale)
+
+      steps += [
+          set_debug_app, edit_shared_prefs, push_test_data, create_flag_changer,
+          set_vega_permissions
+      ]
+
+      def bind_crash_handler(step, dev):
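+        # Wrapping in a function captures 'step' and 'dev' per call; a bare
+        # lambda created in the comprehension below would late-bind 'step'.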
+        return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+      steps = [bind_crash_handler(s, device) for s in steps]
+
+      try:
+        if self._env.concurrent_adb:
+          reraiser_thread.RunAsync(steps)
+        else:
+          for step in steps:
+            step()
+        if self._test_instance.store_tombstones:
+          tombstones.ClearAllTombstones(device)
+      except device_errors.CommandFailedError:
+        if not device.IsOnline():
+          raise
+
+        # A bugreport can be large and take a while to generate, so only capture
+        # one if we're using a remote manager.
+        if isinstance(
+            self._env.output_manager,
+            remote_output_manager.RemoteOutputManager):
+          logging.error(
+              'Error when setting up device for tests. Taking a bugreport for '
+              'investigation. This may take a while...')
+          report_name = '%s.bugreport' % device.serial
+          with self._env.output_manager.ArchivedTempfile(
+              report_name, 'bug_reports') as report_file:
+            device.TakeBugReport(report_file.name)
+          logging.error('Bug report saved to %s', report_file.Link())
+        raise
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+    # Created here instead of on a per-test basis so that the downloaded
+    # expectations can be re-used between tests, saving a significant amount
+    # of time.
+    self._skia_gold_work_dir = tempfile.mkdtemp()
+    self._skia_gold_session_manager = gold_utils.AndroidSkiaGoldSessionManager(
+        self._skia_gold_work_dir, self._test_instance.skia_gold_properties)
+    if self._test_instance.wait_for_java_debugger:
+      logging.warning('*' * 80)
+      logging.warning('Waiting for debugger to attach to process: %s',
+                      target_package)
+      logging.warning('*' * 80)
+
+  #override
+  def TearDown(self):
+    shutil.rmtree(self._skia_gold_work_dir)
+    self._skia_gold_work_dir = None
+    self._skia_gold_session_manager = None
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    @local_device_environment.handle_shard_failures_with(
+        self._env.DenylistDevice)
+    @trace_event.traced
+    def individual_device_tear_down(dev):
+      if str(dev) in self._flag_changers:
+        self._flag_changers[str(dev)].Restore()
+
+      # Remove package-specific configuration
+      dev.RunShellCommand(['am', 'clear-debug-app'], check_return=True)
+
+      valgrind_tools.SetChromeTimeoutScale(dev, None)
+
+      # Restore any shared preference files that we stored during setup.
+      # This should run before the replace-package context manager exits so
+      # that we don't have to special-case restoring files of replaced
+      # system apps.
+      for pref_to_restore in self._shared_prefs_to_restore:
+        pref_to_restore.Commit(force_commit=True)
+
+      # Context manager exit handlers are applied in reverse order
+      # of the enter handlers.
+      for context in reversed(self._context_managers[str(dev)]):
+        # See pylint-related comment above with __enter__()
+        # pylint: disable=no-member
+        context.__exit__(*sys.exc_info())
+        # pylint: enable=no-member
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
+
+  def _CreateFlagChangerIfNeeded(self, device):
+    if str(device) not in self._flag_changers:
+      cmdline_file = 'test-cmdline-file'
+      if self._test_instance.use_apk_under_test_flags_file:
+        if self._test_instance.package_info:
+          cmdline_file = self._test_instance.package_info.cmdline_file
+        else:
+          raise Exception('No PackageInfo found but'
+                          '--use-apk-under-test-flags-file is specified.')
+      self._flag_changers[str(device)] = flag_changer.FlagChanger(
+          device, cmdline_file)
+
+  #override
+  def _CreateShards(self, tests):
+    return tests
+
+  #override
+  def _GetTests(self):
+    if self._test_instance.junit4_runner_supports_listing:
+      raw_tests = self._GetTestsFromRunner()
+      tests = self._test_instance.ProcessRawTests(raw_tests)
+    else:
+      tests = self._test_instance.GetTests()
+    tests = self._ApplyExternalSharding(
+        tests, self._test_instance.external_shard_index,
+        self._test_instance.total_external_shards)
+    return tests
+
+  #override
+  def _GroupTests(self, tests):
+    batched_tests = dict()
+    other_tests = []
+    for test in tests:
+      annotations = test['annotations']
+      if 'Batch' in annotations and 'RequiresRestart' not in annotations:
+        batch_name = annotations['Batch']['value']
+        if not batch_name:
+          batch_name = test['class']
+
+        # Feature flags won't work in instrumentation tests unless the activity
+        # is restarted.
+        # Tests with identical features are grouped to minimize restarts.
+        if 'Batch$SplitByFeature' in annotations:
+          if 'Features$EnableFeatures' in annotations:
+            batch_name += '|enabled:' + ','.join(
+                sorted(annotations['Features$EnableFeatures']['value']))
+          if 'Features$DisableFeatures' in annotations:
+            batch_name += '|disabled:' + ','.join(
+                sorted(annotations['Features$DisableFeatures']['value']))
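+        # e.g. with batch 'MyBatch' and enabled features ['A', 'B'], the group
+        # key becomes 'MyBatch|enabled:A,B' (names here are illustrative).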
+
+        if batch_name not in batched_tests:
+          batched_tests[batch_name] = []
+        batched_tests[batch_name].append(test)
+      else:
+        other_tests.append(test)
+
+    all_tests = []
+    for _, tests in batched_tests.items():
+      tests.sort()  # Ensure a consistent ordering across external shards.
+      all_tests.extend([
+          tests[i:i + _TEST_BATCH_MAX_GROUP_SIZE]
+          for i in range(0, len(tests), _TEST_BATCH_MAX_GROUP_SIZE)
+      ])
+    all_tests.extend(other_tests)
+    return all_tests
+
+  #override
+  def _GetUniqueTestName(self, test):
+    return instrumentation_test_instance.GetUniqueTestName(test)
+
+  #override
+  def _RunTest(self, device, test):
+    extras = {}
+
+    # Provide package name under test for apk_under_test.
+    if self._test_instance.apk_under_test:
+      package_name = self._test_instance.apk_under_test.GetPackageName()
+      extras[_EXTRA_PACKAGE_UNDER_TEST] = package_name
+
+    flags_to_add = []
+    test_timeout_scale = None
+    if self._test_instance.coverage_directory:
+      if isinstance(test, list):
+        coverage_basename = '%s_%s_group' % (test[0]['class'],
+                                             test[0]['method'])
+      else:
+        coverage_basename = '%s_%s' % (test['class'], test['method'])
+      if self._test_instance.jacoco_coverage_type:
+        coverage_basename += '_' + self._test_instance.jacoco_coverage_type
+      extras['coverage'] = 'true'
+      coverage_directory = os.path.join(
+          device.GetExternalStoragePath(), 'chrome', 'test', 'coverage')
+      if not device.PathExists(coverage_directory):
+        device.RunShellCommand(['mkdir', '-p', coverage_directory],
+                               check_return=True)
+      coverage_device_file = os.path.join(coverage_directory, coverage_basename)
+      coverage_device_file += '.exec'
+      extras['coverageFile'] = coverage_device_file
+    # Save screenshot if screenshot dir is specified (save locally) or if
+    # a GS bucket is passed (save in cloud).
+    screenshot_device_file = device_temp_file.DeviceTempFile(
+        device.adb, suffix='.png', dir=device.GetExternalStoragePath())
+    extras[EXTRA_SCREENSHOT_FILE] = screenshot_device_file.name
+
+    # Set up the screenshot directory. This needs to be done for each test so
+    # that we only get screenshots created by that test. It has to be on
+    # external storage since the default location doesn't allow file creation
+    # from the instrumentation test app on Android L and M.
+    ui_capture_dir = device_temp_file.NamedDeviceTemporaryDirectory(
+        device.adb,
+        dir=device.GetExternalStoragePath())
+    extras[EXTRA_UI_CAPTURE_DIR] = ui_capture_dir.name
+
+    if self._env.trace_output:
+      trace_device_file = device_temp_file.DeviceTempFile(
+          device.adb, suffix='.json', dir=device.GetExternalStoragePath())
+      extras[EXTRA_TRACE_FILE] = trace_device_file.name
+
+    target = '%s/%s' % (self._test_instance.test_package,
+                        self._test_instance.junit4_runner_class)
+    if isinstance(test, list):
+
+      def name_and_timeout(t):
+        n = instrumentation_test_instance.GetTestName(t)
+        i = self._GetTimeoutFromAnnotations(t['annotations'], n)
+        return (n, i)
+
+      test_names, timeouts = zip(*(name_and_timeout(t) for t in test))
+
+      test_name = instrumentation_test_instance.GetTestName(
+          test[0]) + _BATCH_SUFFIX
+      extras['class'] = ','.join(test_names)
+      test_display_name = test_name
+      timeout = min(MAX_BATCH_TEST_TIMEOUT,
+                    FIXED_TEST_TIMEOUT_OVERHEAD + sum(timeouts))
+    else:
+      assert test['is_junit4']
+      test_name = instrumentation_test_instance.GetTestName(test)
+      test_display_name = self._GetUniqueTestName(test)
+
+      extras['class'] = test_name
+      if 'flags' in test and test['flags']:
+        flags_to_add.extend(test['flags'])
+      timeout = FIXED_TEST_TIMEOUT_OVERHEAD + self._GetTimeoutFromAnnotations(
+          test['annotations'], test_display_name)
+
+      test_timeout_scale = self._GetTimeoutScaleFromAnnotations(
+          test['annotations'])
+      if test_timeout_scale and test_timeout_scale != 1:
+        valgrind_tools.SetChromeTimeoutScale(
+            device, test_timeout_scale * self._test_instance.timeout_scale)
+
+    if self._test_instance.wait_for_java_debugger:
+      timeout = None
+    logging.info('preparing to run %s: %s', test_display_name, test)
+
+    if _IsRenderTest(test):
+      # TODO(mikecase): Add DeviceTempDirectory class and use that instead.
+      self._render_tests_device_output_dir = posixpath.join(
+          device.GetExternalStoragePath(), 'render_test_output_dir')
+      flags_to_add.append('--render-test-output-dir=%s' %
+                          self._render_tests_device_output_dir)
+
+    if _IsWPRRecordReplayTest(test):
+      wpr_archive_relative_path = _GetWPRArchivePath(test)
+      if not wpr_archive_relative_path:
+        raise RuntimeError('Could not find the WPR archive file path '
+                           'from annotation.')
+      wpr_archive_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                      wpr_archive_relative_path)
+      if not os.path.isdir(wpr_archive_path):
+        raise RuntimeError('WPRArchiveDirectory annotation should point '
+                           'to a directory only. '
+                           '{0} exist: {1}'.format(
+                               wpr_archive_path,
+                               os.path.exists(wpr_archive_path)))
+
+      # Some Linux versions do not like '#' in file names, so replace it
+      # with '__'.
+      archive_path = os.path.join(
+          wpr_archive_path,
+          _ReplaceUncommonChars(self._GetUniqueTestName(test)) + '.wprgo')
+
+      if not os.path.exists(_WPR_GO_LINUX_X86_64_PATH):
+        # If we got to this stage, then we should have
+        # checkout_android set.
+        raise RuntimeError(
+            'WPR Go binary not found at {}'.format(_WPR_GO_LINUX_X86_64_PATH))
+      # Tells the server to use the binaries retrieved from CIPD.
+      chrome_proxy_utils.ChromeProxySession.SetWPRServerBinary(
+          _WPR_GO_LINUX_X86_64_PATH)
+      self._chrome_proxy = chrome_proxy_utils.ChromeProxySession()
+      self._chrome_proxy.wpr_record_mode = self._test_instance.wpr_record_mode
+      self._chrome_proxy.Start(device, archive_path)
+      flags_to_add.extend(self._chrome_proxy.GetFlags())
+
+    if flags_to_add:
+      self._CreateFlagChangerIfNeeded(device)
+      self._flag_changers[str(device)].PushFlags(add=flags_to_add)
+
+    time_ms = lambda: int(time.time() * 1e3)
+    start_ms = time_ms()
+
+    with ui_capture_dir:
+      with self._ArchiveLogcat(device, test_name) as logcat_file:
+        output = device.StartInstrumentation(
+            target, raw=True, extras=extras, timeout=timeout, retries=0)
+
+      duration_ms = time_ms() - start_ms
+
+      with contextlib_ext.Optional(
+          trace_event.trace('ProcessResults'),
+          self._env.trace_output):
+        output = self._test_instance.MaybeDeobfuscateLines(output)
+        # TODO(jbudorick): Make instrumentation tests output a JSON so this
+        # doesn't have to parse the output.
+        result_code, result_bundle, statuses = (
+            self._test_instance.ParseAmInstrumentRawOutput(output))
+        results = self._test_instance.GenerateTestResults(
+            result_code, result_bundle, statuses, duration_ms,
+            device.product_cpu_abi, self._test_instance.symbolizer)
+
+      if self._env.trace_output:
+        self._SaveTraceData(trace_device_file, device, test['class'])
+
+
+      def restore_flags():
+        if flags_to_add:
+          self._flag_changers[str(device)].Restore()
+
+      def restore_timeout_scale():
+        if test_timeout_scale:
+          valgrind_tools.SetChromeTimeoutScale(
+              device, self._test_instance.timeout_scale)
+
+      def handle_coverage_data():
+        if self._test_instance.coverage_directory:
+          try:
+            if not os.path.exists(self._test_instance.coverage_directory):
+              os.makedirs(self._test_instance.coverage_directory)
+            device.PullFile(coverage_device_file,
+                            self._test_instance.coverage_directory)
+            device.RemovePath(coverage_device_file, True)
+          except (OSError, base_error.BaseError) as e:
+            logging.warning('Failed to handle coverage data after tests: %s', e)
+
+      def handle_render_test_data():
+        if _IsRenderTest(test):
+          # Render tests do not cause test failure by default. So we have to
+          # check to see if any failure images were generated even if the test
+          # does not fail.
+          try:
+            self._ProcessRenderTestResults(device, results)
+          finally:
+            device.RemovePath(self._render_tests_device_output_dir,
+                              recursive=True,
+                              force=True)
+            self._render_tests_device_output_dir = None
+
+      def pull_ui_screen_captures():
+        screenshots = []
+        for filename in device.ListDirectory(ui_capture_dir.name):
+          if filename.endswith('.json'):
+            screenshots.append(pull_ui_screenshot(filename))
+        if screenshots:
+          json_archive_name = 'ui_capture_%s_%s.json' % (
+              test_name.replace('#', '.'),
+              time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+          with self._env.output_manager.ArchivedTempfile(
+              json_archive_name, 'ui_capture', output_manager.Datatype.JSON
+              ) as json_archive:
+            json.dump(screenshots, json_archive)
+          _SetLinkOnResults(results, test_name, 'ui screenshot',
+                            json_archive.Link())
+
+      def pull_ui_screenshot(filename):
+        source_dir = ui_capture_dir.name
+        json_path = posixpath.join(source_dir, filename)
+        json_data = json.loads(device.ReadFile(json_path))
+        image_file_path = posixpath.join(source_dir, json_data['location'])
+        with self._env.output_manager.ArchivedTempfile(
+            json_data['location'], 'ui_capture', output_manager.Datatype.PNG
+            ) as image_archive:
+          device.PullFile(image_file_path, image_archive.name)
+        json_data['image_link'] = image_archive.Link()
+        return json_data
+
+      def stop_chrome_proxy():
+        # Removes the port forwarding
+        if self._chrome_proxy:
+          self._chrome_proxy.Stop(device)
+          if not self._chrome_proxy.wpr_replay_mode:
+            logging.info('WPR Record test generated archive file %s',
+                         self._chrome_proxy.wpr_archive_path)
+          self._chrome_proxy = None
+
+
+      # While constructing the TestResult objects, we can parallelize several
+      # steps that involve ADB. These steps should NOT depend on any info in
+      # the results! Things such as whether the test CRASHED have not yet been
+      # determined.
+      post_test_steps = [
+          restore_flags, restore_timeout_scale, stop_chrome_proxy,
+          handle_coverage_data, handle_render_test_data, pull_ui_screen_captures
+      ]
+      if self._env.concurrent_adb:
+        reraiser_thread.RunAsync(post_test_steps)
+      else:
+        for step in post_test_steps:
+          step()
+
+    if logcat_file:
+      _SetLinkOnResults(results, test_name, 'logcat', logcat_file.Link())
+
+    # Update the result name if the test used flags.
+    if flags_to_add:
+      for r in results:
+        if r.GetName() == test_name:
+          r.SetName(test_display_name)
+
+    # Add UNKNOWN results for any missing tests.
+    iterable_test = test if isinstance(test, list) else [test]
+    test_names = set(self._GetUniqueTestName(t) for t in iterable_test)
+    results_names = set(r.GetName() for r in results)
+    results.extend(
+        base_test_result.BaseTestResult(u, base_test_result.ResultType.UNKNOWN)
+        for u in test_names.difference(results_names))
+
+    # Update the result type if we detect a crash.
+    try:
+      if DidPackageCrashOnDevice(self._test_instance.test_package, device):
+        for r in results:
+          if r.GetType() == base_test_result.ResultType.UNKNOWN:
+            r.SetType(base_test_result.ResultType.CRASH)
+    except device_errors.CommandTimeoutError:
+      logging.warning('timed out when detecting/dismissing error dialogs')
+      # Attach screenshot to the test to help with debugging the dialog boxes.
+      self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+                           results, 'dialog_box_screenshot')
+
+    # The crash result can be set above or in
+    # InstrumentationTestRun.GenerateTestResults. If a test crashes,
+    # subprocesses such as the one used by EmbeddedTestServerRule can be left
+    # alive in a bad state, so kill them now.
+    for r in results:
+      if r.GetType() == base_test_result.ResultType.CRASH:
+        for apk in self._test_instance.additional_apks:
+          device.ForceStop(apk.GetPackageName())
+
+    # Handle failures by:
+    #   - optionally taking a screenshot
+    #   - logging the raw output at INFO level
+    #   - clearing the application state while persisting permissions
+    if any(r.GetType() not in (base_test_result.ResultType.PASS,
+                               base_test_result.ResultType.SKIP)
+           for r in results):
+      self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+                           results, 'post_test_screenshot')
+
+      logging.info('detected failure in %s. raw output:', test_display_name)
+      for l in output:
+        logging.info('  %s', l)
+      if (not self._env.skip_clear_data
+          and self._test_instance.package_info):
+        permissions = (
+            self._test_instance.apk_under_test.GetPermissions()
+            if self._test_instance.apk_under_test
+            else None)
+        device.ClearApplicationState(self._test_instance.package_info.package,
+                                     permissions=permissions)
+    else:
+      logging.debug('raw output from %s:', test_display_name)
+      for l in output:
+        logging.debug('  %s', l)
+
+    if self._test_instance.store_tombstones:
+      resolved_tombstones = tombstones.ResolveTombstones(
+          device,
+          resolve_all_tombstones=True,
+          include_stack_symbols=False,
+          wipe_tombstones=True,
+          tombstone_symbolizer=self._test_instance.symbolizer)
+      if resolved_tombstones:
+        tombstone_filename = 'tombstones_%s_%s' % (time.strftime(
+            '%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial)
+        with self._env.output_manager.ArchivedTempfile(
+            tombstone_filename, 'tombstones') as tombstone_file:
+          tombstone_file.write('\n'.join(resolved_tombstones))
+
+        # Associate tombstones with first crashing test.
+        for result in results:
+          if result.GetType() == base_test_result.ResultType.CRASH:
+            result.SetLink('tombstones', tombstone_file.Link())
+            break
+        else:
+          # We don't always detect crashes correctly. In this case,
+          # associate with the first test.
+          results[0].SetLink('tombstones', tombstone_file.Link())
+
+    unknown_tests = set(r.GetName() for r in results
+                        if r.GetType() == base_test_result.ResultType.UNKNOWN)
+
+    # If a test that is batched crashes, the rest of the tests in that batch
+    # won't be run and will have their status left as unknown in results,
+    # so rerun those tests. (see crbug/1127935)
+    # Need to "unbatch" the tests so that on subsequent tries they can be
+    # run individually. This prevents an unrecognized crash from blocking
+    # the other tests in the batch. Running the tests as unbatched does
+    # not happen until a retry happens at the local_device_test_run/environment
+    # level.
+    tests_to_rerun = []
+    for t in iterable_test:
+      if self._GetUniqueTestName(t) in unknown_tests:
+        prior_attempts = t.get('run_attempts', 0)
+        t['run_attempts'] = prior_attempts + 1
+        # It's possible every test in the batch could crash, so we need to
+        # retry up to as many times as there are tests.
+        if prior_attempts < len(results):
+          if t['annotations']:
+            t['annotations'].pop('Batch', None)
+          tests_to_rerun.append(t)
+
+    # If we have a crash that isn't recognized as a crash in a batch, the tests
+    # will be marked as unknown. Sometimes a test failure causes a crash, but
+    # the crash isn't recorded because the failure was detected first.
+    # If the UNKNOWN tests were rerun while unbatched and passed, they would
+    # have an UNKNOWN, PASS status and be improperly marked as flaky, so
+    # change their status to NOTRUN and don't rerun them here. They will
+    # get rerun individually at the local_device_test_run/environment level,
+    # as the "Batch" annotation was removed.
+    found_crash_or_fail = False
+    for r in results:
+      if (r.GetType() == base_test_result.ResultType.CRASH
+          or r.GetType() == base_test_result.ResultType.FAIL):
+        found_crash_or_fail = True
+        break
+    if not found_crash_or_fail:
+      # Don't bother rerunning since the unrecognized crashes in
+      # the batch will keep failing.
+      tests_to_rerun = None
+      for r in results:
+        if r.GetType() == base_test_result.ResultType.UNKNOWN:
+          r.SetType(base_test_result.ResultType.NOTRUN)
+
+    return results, tests_to_rerun if tests_to_rerun else None
+
+  def _GetTestsFromRunner(self):
+    test_apk_path = self._test_instance.test_apk.path
+    pickle_path = '%s-runner.pickle' % test_apk_path
+    # For incremental APKs, the code doesn't live in the apk, so instead check
+    # the timestamp of the target's .stamp file.
+    if self._test_instance.test_apk_incremental_install_json:
+      with open(self._test_instance.test_apk_incremental_install_json) as f:
+        data = json.load(f)
+      out_dir = constants.GetOutDirectory()
+      test_mtime = max(
+          os.path.getmtime(os.path.join(out_dir, p)) for p in data['dex_files'])
+    else:
+      test_mtime = os.path.getmtime(test_apk_path)
+
+    try:
+      return instrumentation_test_instance.GetTestsFromPickle(
+          pickle_path, test_mtime)
+    except instrumentation_test_instance.TestListPickleException as e:
+      logging.info('Could not get tests from pickle: %s', e)
+    logging.info('Getting tests by having %s list them.',
+                 self._test_instance.junit4_runner_class)
+    def list_tests(d):
+      def _run(dev):
+        # We need to use GetAppWritablePath instead of GetExternalStoragePath
+        # here because we will not have applied legacy storage workarounds on R+
+        # yet.
+        with device_temp_file.DeviceTempFile(
+            dev.adb, suffix='.json',
+            dir=dev.GetAppWritablePath()) as dev_test_list_json:
+          junit4_runner_class = self._test_instance.junit4_runner_class
+          test_package = self._test_instance.test_package
+          extras = {
+            'log': 'true',
+            # Workaround for https://github.com/mockito/mockito/issues/922
+            'notPackage': 'net.bytebuddy',
+          }
+          extras[_EXTRA_TEST_LIST] = dev_test_list_json.name
+          target = '%s/%s' % (test_package, junit4_runner_class)
+          timeout = 240
+          if self._test_instance.wait_for_java_debugger:
+            timeout = None
+          with self._ArchiveLogcat(dev, 'list_tests'):
+            test_list_run_output = dev.StartInstrumentation(
+                target, extras=extras, retries=0, timeout=timeout)
+          if any(test_list_run_output):
+            logging.error('Unexpected output while listing tests:')
+            for line in test_list_run_output:
+              logging.error('  %s', line)
+          with tempfile_ext.NamedTemporaryDirectory() as host_dir:
+            host_file = os.path.join(host_dir, 'list_tests.json')
+            dev.PullFile(dev_test_list_json.name, host_file)
+            with open(host_file, 'r') as host_file:
+              return json.load(host_file)
+
+      return crash_handler.RetryOnSystemCrash(_run, d)
+
+    raw_test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+    # If all devices failed to list tests, raise an exception.
+    # Check that tl is not None and is not empty.
+    if all(not tl for tl in raw_test_lists):
+      raise device_errors.CommandFailedError(
+          'Failed to list tests on any device')
+
+    # Get the first viable list of raw tests.
+    raw_tests = [tl for tl in raw_test_lists if tl][0]
+
+    instrumentation_test_instance.SaveTestsToPickle(pickle_path, raw_tests)
+    return raw_tests
+
+  @contextlib.contextmanager
+  def _ArchiveLogcat(self, device, test_name):
+    stream_name = 'logcat_%s_shard%s_%s_%s' % (
+        test_name.replace('#', '.'), self._test_instance.external_shard_index,
+        time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial)
+
+    logcat_file = None
+    logmon = None
+    try:
+      with self._env.output_manager.ArchivedTempfile(
+          stream_name, 'logcat') as logcat_file:
+        with logcat_monitor.LogcatMonitor(
+            device.adb,
+            filter_specs=local_device_environment.LOGCAT_FILTERS,
+            output_file=logcat_file.name,
+            transform_func=self._test_instance.MaybeDeobfuscateLines,
+            check_error=False) as logmon:
+          with _LogTestEndpoints(device, test_name):
+            with contextlib_ext.Optional(
+                trace_event.trace(test_name),
+                self._env.trace_output):
+              yield logcat_file
+    finally:
+      if logmon:
+        logmon.Close()
+      if logcat_file and logcat_file.Link():
+        logging.info('Logcat saved to %s', logcat_file.Link())
+
+  def _SaveTraceData(self, trace_device_file, device, test_class):
+    trace_host_file = self._env.trace_output
+
+    if device.FileExists(trace_device_file.name):
+      try:
+        java_trace_json = device.ReadFile(trace_device_file.name)
+      except IOError:
+        raise Exception('error pulling trace file from device')
+      finally:
+        trace_device_file.close()
+
+      process_name = '%s (device %s)' % (test_class, device.serial)
+      process_hash = int(hashlib.md5(process_name).hexdigest()[:6], 16)
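+      # The trace format needs an integer pid; derive a stable pseudo-pid
+      # from the test class and device serial so that each run's events
+      # group together in the trace viewer.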
+
+      java_trace = json.loads(java_trace_json)
+      java_trace.sort(key=lambda event: event['ts'])
+
+      get_date_command = 'echo $EPOCHREALTIME'
+      device_time = device.RunShellCommand(get_date_command, single_line=True)
+      device_time = float(device_time) * 1e6
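+      # $EPOCHREALTIME is seconds with sub-second precision; the conversion
+      # above puts it in microseconds to match the trace's 'ts' units.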
+      system_time = trace_time.Now()
+      time_difference = system_time - device_time
+
+      threads_to_add = set()
+      for event in java_trace:
+        # Ensure thread ID and thread name will be linked in the metadata.
+        threads_to_add.add((event['tid'], event['name']))
+
+        event['pid'] = process_hash
+
+        # Adjust time stamp to align with Python trace times (from
+        # trace_time.Now()).
+        event['ts'] += time_difference
+
+      for tid, thread_name in threads_to_add:
+        thread_name_metadata = {'pid': process_hash, 'tid': tid,
+                                'ts': 0, 'ph': 'M', 'cat': '__metadata',
+                                'name': 'thread_name',
+                                'args': {'name': thread_name}}
+        java_trace.append(thread_name_metadata)
+
+      process_name_metadata = {'pid': process_hash, 'tid': 0, 'ts': 0,
+                               'ph': 'M', 'cat': '__metadata',
+                               'name': 'process_name',
+                               'args': {'name': process_name}}
+      java_trace.append(process_name_metadata)
+
+      java_trace_json = json.dumps(java_trace)
+      java_trace_json = java_trace_json.rstrip(' ]')
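+      # The host trace file is kept as an unterminated JSON array so that
+      # successive tests can append events: this chunk's closing bracket is
+      # stripped above, and its opening bracket becomes a comma below if the
+      # file already has content.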
+
+      with open(trace_host_file, 'r') as host_handle:
+        host_contents = host_handle.readline()
+
+      if host_contents:
+        java_trace_json = ',%s' % java_trace_json.lstrip(' [')
+
+      with open(trace_host_file, 'a') as host_handle:
+        host_handle.write(java_trace_json)
+
+  def _SaveScreenshot(self, device, screenshot_device_file, test_name, results,
+                      link_name):
+    screenshot_filename = '%s-%s.png' % (
+        test_name, time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+    if device.FileExists(screenshot_device_file.name):
+      with self._env.output_manager.ArchivedTempfile(
+          screenshot_filename, 'screenshot',
+          output_manager.Datatype.PNG) as screenshot_host_file:
+        try:
+          device.PullFile(screenshot_device_file.name,
+                          screenshot_host_file.name)
+        finally:
+          screenshot_device_file.close()
+      _SetLinkOnResults(results, test_name, link_name,
+                        screenshot_host_file.Link())
+
+  def _ProcessRenderTestResults(self, device, results):
+    if not self._render_tests_device_output_dir:
+      return
+    self._ProcessSkiaGoldRenderTestResults(device, results)
+
+  def _ProcessSkiaGoldRenderTestResults(self, device, results):
+    gold_dir = posixpath.join(self._render_tests_device_output_dir,
+                              _DEVICE_GOLD_DIR)
+    if not device.FileExists(gold_dir):
+      return
+
+    gold_properties = self._test_instance.skia_gold_properties
+    with tempfile_ext.NamedTemporaryDirectory() as host_dir:
+      use_luci = not (gold_properties.local_pixel_tests
+                      or gold_properties.no_luci_auth)
+
+      # Pull everything at once instead of file-by-file: each command over
+      # adb has some overhead, so a single pull is slightly faster than
+      # many individual ones.
+      host_dir = os.path.join(host_dir, _DEVICE_GOLD_DIR)
+      device.PullFile(gold_dir, host_dir)
+      for image_name in os.listdir(host_dir):
+        if not image_name.endswith('.png'):
+          continue
+
+        render_name = image_name[:-4]
+        json_name = render_name + '.json'
+        json_path = os.path.join(host_dir, json_name)
+        image_path = os.path.join(host_dir, image_name)
+        full_test_name = None
+        if not os.path.exists(json_path):
+          _FailTestIfNecessary(results, full_test_name)
+          _AppendToLog(
+              results, full_test_name,
+              'Unable to find corresponding JSON file for image %s '
+              'when doing Skia Gold comparison.' % image_name)
+          continue
+
+        # Add 'ignore': '1' if a comparison failure would not be surfaced, as
+        # that implies that we aren't actively maintaining baselines for the
+        # test. This helps prevent unrelated CLs from getting comments posted to
+        # them.
+        # Additionally, add the ignore if we're running on a trybot and this is
+        # not our final retry attempt in order to prevent unrelated CLs from
+        # getting spammed if a test is flaky.
+        should_rewrite = False
+        with open(json_path) as infile:
+          # All the key/value pairs in the JSON file are strings, so convert
+          # to a bool.
+          json_dict = json.load(infile)
+          fail_on_unsupported = json_dict.get('fail_on_unsupported_configs',
+                                              'false')
+          fail_on_unsupported = fail_on_unsupported.lower() == 'true'
+          # Grab the full test name so we can associate the comparison with a
+          # particular test, which is necessary if tests are batched together.
+          # Remove the key/value pair from the JSON since we don't need/want to
+          # upload it to Gold.
+          full_test_name = json_dict.get('full_test_name')
+          if 'full_test_name' in json_dict:
+            should_rewrite = True
+            del json_dict['full_test_name']
+
+        running_on_unsupported = (
+            device.build_version_sdk not in RENDER_TEST_MODEL_SDK_CONFIGS.get(
+                device.product_model, []) and not fail_on_unsupported)
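+        # i.e. there are no maintained Gold baselines for this model/SDK
+        # combination and the test did not opt in to failing anyway.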
+        # TODO(skbug.com/10787): Remove the ignore on non-final retry once we
+        # fully switch over to using the Gerrit plugin for surfacing Gold
+        # information since it does not spam people with emails due to automated
+        # comments.
+        not_final_retry = self._env.current_try + 1 != self._env.max_tries
+        tryjob_but_not_final_retry = (
+            not_final_retry and gold_properties.IsTryjobRun())
+        should_ignore_in_gold = (
+            running_on_unsupported or tryjob_but_not_final_retry)
+        # We still want to fail the test even if we're ignoring the image in
+        # Gold if we're running on a supported configuration, so
+        # should_ignore_in_gold != should_hide_failure.
+        should_hide_failure = running_on_unsupported
+        if should_ignore_in_gold:
+          should_rewrite = True
+          json_dict['ignore'] = '1'
+        if should_rewrite:
+          with open(json_path, 'w') as outfile:
+            json.dump(json_dict, outfile)
+
+        gold_session = self._skia_gold_session_manager.GetSkiaGoldSession(
+            keys_input=json_path)
+
+        try:
+          status, error = gold_session.RunComparison(
+              name=render_name,
+              png_file=image_path,
+              output_manager=self._env.output_manager,
+              use_luci=use_luci)
+        except Exception as e:  # pylint: disable=broad-except
+          _FailTestIfNecessary(results, full_test_name)
+          _AppendToLog(results, full_test_name,
+                       'Skia Gold comparison raised exception: %s' % e)
+          continue
+
+        if not status:
+          continue
+
+        # Don't fail the test if we ran on an unsupported configuration unless
+        # the test has explicitly opted in, as it's likely that baselines
+        # aren't maintained for that configuration.
+        if should_hide_failure:
+          if self._test_instance.skia_gold_properties.local_pixel_tests:
+            _AppendToLog(
+                results, full_test_name,
+                'Gold comparison for %s failed, but model %s with SDK '
+                '%d is not a supported configuration. This failure would be '
+                'ignored on the bots, but failing since tests are being run '
+                'locally.' %
+                (render_name, device.product_model, device.build_version_sdk))
+          else:
+            _AppendToLog(
+                results, full_test_name,
+                'Gold comparison for %s failed, but model %s with SDK '
+                '%d is not a supported configuration, so ignoring failure.' %
+                (render_name, device.product_model, device.build_version_sdk))
+            continue
+
+        _FailTestIfNecessary(results, full_test_name)
+        failure_log = (
+            'Skia Gold reported failure for RenderTest %s. See '
+            'RENDER_TESTS.md for how to fix this failure.' % render_name)
+        status_codes = (
+            self._skia_gold_session_manager.GetSessionClass().StatusCodes)
+        if status == status_codes.AUTH_FAILURE:
+          _AppendToLog(results, full_test_name,
+                       'Gold authentication failed with output %s' % error)
+        elif status == status_codes.INIT_FAILURE:
+          _AppendToLog(results, full_test_name,
+                       'Gold initialization failed with output %s' % error)
+        elif status == status_codes.COMPARISON_FAILURE_REMOTE:
+          public_triage_link, internal_triage_link = (
+              gold_session.GetTriageLinks(render_name))
+          if not public_triage_link:
+            _AppendToLog(
+                results, full_test_name,
+                'Failed to get triage link for %s, raw output: %s' %
+                (render_name, error))
+            _AppendToLog(
+                results, full_test_name, 'Reason for no triage link: %s' %
+                gold_session.GetTriageLinkOmissionReason(render_name))
+            continue
+          if gold_properties.IsTryjobRun():
+            _SetLinkOnResults(results, full_test_name,
+                              'Public Skia Gold triage link for entire CL',
+                              public_triage_link)
+            _SetLinkOnResults(results, full_test_name,
+                              'Internal Skia Gold triage link for entire CL',
+                              internal_triage_link)
+          else:
+            _SetLinkOnResults(
+                results, full_test_name,
+                'Public Skia Gold triage link for %s' % render_name,
+                public_triage_link)
+            _SetLinkOnResults(
+                results, full_test_name,
+                'Internal Skia Gold triage link for %s' % render_name,
+                internal_triage_link)
+          _AppendToLog(results, full_test_name, failure_log)
+
+        elif status == status_codes.COMPARISON_FAILURE_LOCAL:
+          given_link = gold_session.GetGivenImageLink(render_name)
+          closest_link = gold_session.GetClosestImageLink(render_name)
+          diff_link = gold_session.GetDiffImageLink(render_name)
+
+          processed_template_output = _GenerateRenderTestHtml(
+              render_name, given_link, closest_link, diff_link)
+          with self._env.output_manager.ArchivedTempfile(
+              '%s.html' % render_name, 'gold_local_diffs',
+              output_manager.Datatype.HTML) as html_results:
+            html_results.write(processed_template_output)
+          _SetLinkOnResults(results, full_test_name, render_name,
+                            html_results.Link())
+          _AppendToLog(
+              results, full_test_name,
+              'See %s link for diff image with closest positive.' % render_name)
+        elif status == status_codes.LOCAL_DIFF_FAILURE:
+          _AppendToLog(results, full_test_name,
+                       'Failed to generate diffs from Gold: %s' % error)
+        else:
+          logging.error(
+              'Given unhandled SkiaGoldSession StatusCode %s with error %s',
+              status, error)
+
+  #override
+  def _ShouldRetry(self, test, result):
+    # We've tried to disable retries in the past with mixed results.
+    # See crbug.com/619055 for historical context and crbug.com/797002
+    # for ongoing efforts.
+    if 'Batch' in test['annotations'] and test['annotations']['Batch'][
+        'value'] == 'UnitTests':
+      return False
+    del test, result
+    return True
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  @classmethod
+  def _GetTimeoutScaleFromAnnotations(cls, annotations):
+    try:
+      return int(annotations.get('TimeoutScale', {}).get('value', 1))
+    except ValueError as e:
+      logging.warning("Non-integer value of TimeoutScale ignored. (%s)", str(e))
+      return 1
+
+  @classmethod
+  def _GetTimeoutFromAnnotations(cls, annotations, test_name):
+    for k, v in TIMEOUT_ANNOTATIONS:
+      if k in annotations:
+        timeout = v
+        break
+    else:
+      logging.warning('Using default 1 minute timeout for %s', test_name)
+      timeout = 60
+
+    timeout *= cls._GetTimeoutScaleFromAnnotations(annotations)
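+    # e.g. a LargeTest (120s) with TimeoutScale 2 yields a 240 second timeout.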
+
+    return timeout
+
+
+def _IsWPRRecordReplayTest(test):
+  """Determines whether a test or a list of tests is a WPR RecordReplay Test."""
+  if not isinstance(test, list):
+    test = [test]
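+  # 'annotations' maps annotation names to values, e.g. (illustrative):
+  #   {'Feature': {'value': ['WPRRecordReplayTest']}}
+  # so this checks the Feature annotation's value list on each test.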
+  return any([
+      WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION in t['annotations'].get(
+          FEATURE_ANNOTATION, {}).get('value', ()) for t in test
+  ])
+
+
+def _GetWPRArchivePath(test):
+  """Retrieves the archive path from the WPRArchiveDirectory annotation."""
+  return test['annotations'].get(WPR_ARCHIVE_FILE_PATH_ANNOTATION,
+                                 {}).get('value', ())
+
+
+def _ReplaceUncommonChars(original):
+  """Replaces uncommon characters with __."""
+  if not original:
+    raise ValueError('parameter should not be empty')
+
+  uncommon_chars = ['#']
+  for char in uncommon_chars:
+    original = original.replace(char, '__')
+  return original
+
+
+def _IsRenderTest(test):
+  """Determines if a test or list of tests has a RenderTest amongst them."""
+  if not isinstance(test, list):
+    test = [test]
+  return any([RENDER_TEST_FEATURE_ANNOTATION in t['annotations'].get(
+              FEATURE_ANNOTATION, {}).get('value', ()) for t in test])
+
+
+def _GenerateRenderTestHtml(image_name, failure_link, golden_link, diff_link):
+  """Generates a RenderTest results page.
+
+  Displays the generated (failure) image, the golden image, and the diff
+  between them.
+
+  Args:
+    image_name: The name of the image whose comparison failed.
+    failure_link: The URL to the generated/failure image.
+    golden_link: The URL to the golden image.
+    diff_link: The URL to the diff image between the failure and golden images.
+
+  Returns:
+    A string containing the generated HTML.
+  """
+  jinja2_env = jinja2.Environment(
+      loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR), trim_blocks=True)
+  template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME)
+  # pylint: disable=no-member
+  return template.render(
+      test_name=image_name,
+      failure_link=failure_link,
+      golden_link=golden_link,
+      diff_link=diff_link)
+
+
+def _FailTestIfNecessary(results, full_test_name):
+  """Marks the given results as failed if it wasn't already.
+
+  Marks the result types as ResultType.FAIL unless they were already some sort
+  of failure type, e.g. ResultType.CRASH.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+  """
+  found_matching_test = _MatchingTestInResults(results, full_test_name)
+  if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name):
+    logging.error(
+        'Could not find result specific to %s, failing all tests in the batch.',
+        full_test_name)
+  for result in results:
+    if found_matching_test and result.GetName() != full_test_name:
+      continue
+    if result.GetType() not in [
+        base_test_result.ResultType.FAIL, base_test_result.ResultType.CRASH,
+        base_test_result.ResultType.TIMEOUT, base_test_result.ResultType.UNKNOWN
+    ]:
+      result.SetType(base_test_result.ResultType.FAIL)
+
+
+def _AppendToLog(results, full_test_name, line):
+  """Appends the given line to the end of the logs of the given results.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+    line: A string to be appended as a new line to the logs of the
+        matching results.
+  """
+  found_matching_test = _MatchingTestInResults(results, full_test_name)
+  if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name):
+    logging.error(
+        'Could not find result specific to %s, appending to log of all tests '
+        'in the batch.', full_test_name)
+  for result in results:
+    if found_matching_test and result.GetName() != full_test_name:
+      continue
+    result.SetLog(result.GetLog() + '\n' + line)
+
+
+def _SetLinkOnResults(results, full_test_name, link_name, link):
+  """Sets the given link on the given results.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+    link_name: A string containing the name of the link being set.
+    link: A string containing the link being set.
+  """
+  found_matching_test = _MatchingTestInResults(results, full_test_name)
+  if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name):
+    logging.error(
+        'Could not find result specific to %s, adding link to results of all '
+        'tests in the batch.', full_test_name)
+  for result in results:
+    if found_matching_test and result.GetName() != full_test_name:
+      continue
+    result.SetLink(link_name, link)
+
+
+def _MatchingTestInResults(results, full_test_name):
+  """Checks if any tests named |full_test_name| are in |results|.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+
+  Returns:
+    True if one of the results in |results| has the same name as
+    |full_test_name|, otherwise False.
+  """
+  return any([r for r in results if r.GetName() == full_test_name])
+
+
+def _ShouldReportNoMatchingResult(full_test_name):
+  """Determines whether a failure to find a matching result is actually bad.
+
+  Args:
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+
+  Returns:
+    False if the failure to find a matching result is expected and should not
+    be reported, otherwise True.
+  """
+  if full_test_name is not None and full_test_name.endswith(_BATCH_SUFFIX):
+    # Handle batched tests, whose reported name is the first test's name +
+    # "_batch".
+    return False
+  return True
diff --git a/src/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/src/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
new file mode 100755
index 0000000..7870cd1
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for local_device_instrumentation_test_run."""
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import unittest
+
+from pylib.base import base_test_result
+from pylib.base import mock_environment
+from pylib.base import mock_test_instance
+from pylib.local.device import local_device_instrumentation_test_run
+
+
+class LocalDeviceInstrumentationTestRunTest(unittest.TestCase):
+
+  def setUp(self):
+    super(LocalDeviceInstrumentationTestRunTest, self).setUp()
+    self._env = mock_environment.MockEnvironment()
+    self._ti = mock_test_instance.MockTestInstance()
+    self._obj = (
+        local_device_instrumentation_test_run.LocalDeviceInstrumentationTestRun(
+            self._env, self._ti))
+
+  # TODO(crbug.com/797002): Decide whether the _ShouldRetry hook is worth
+  # retaining and remove these tests if not.
+
+  def testShouldRetry_failure(self):
+    test = {
+        'annotations': {},
+        'class': 'SadTest',
+        'method': 'testFailure',
+        'is_junit4': True,
+    }
+    result = base_test_result.BaseTestResult(
+        'SadTest.testFailure', base_test_result.ResultType.FAIL)
+    self.assertTrue(self._obj._ShouldRetry(test, result))
+
+  def testShouldRetry_retryOnFailure(self):
+    test = {
+        'annotations': {'RetryOnFailure': None},
+        'class': 'SadTest',
+        'method': 'testRetryOnFailure',
+        'is_junit4': True,
+    }
+    result = base_test_result.BaseTestResult(
+        'SadTest.testRetryOnFailure', base_test_result.ResultType.FAIL)
+    self.assertTrue(self._obj._ShouldRetry(test, result))
+
+  def testShouldRetry_notRun(self):
+    test = {
+        'annotations': {},
+        'class': 'SadTest',
+        'method': 'testNotRun',
+        'is_junit4': True,
+    }
+    result = base_test_result.BaseTestResult(
+        'SadTest.testNotRun', base_test_result.ResultType.NOTRUN)
+    self.assertTrue(self._obj._ShouldRetry(test, result))
+
+  def testIsWPRRecordReplayTest_matchedWithKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['WPRRecordReplayTest', 'dummy']
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertTrue(
+        local_device_instrumentation_test_run._IsWPRRecordReplayTest(test))
+
+  def testIsWPRRecordReplayTest_noMatchedKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['abc', 'dummy']
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertFalse(
+        local_device_instrumentation_test_run._IsWPRRecordReplayTest(test))
+
+  def testGetWPRArchivePath_matchedWithKey(self):
+    test = {
+        'annotations': {
+            'WPRArchiveDirectory': {
+                'value': 'abc'
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertEqual(
+        local_device_instrumentation_test_run._GetWPRArchivePath(test), 'abc')
+
+  def testGetWPRArchivePath_noMatchedWithKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': 'abc'
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertFalse(
+        local_device_instrumentation_test_run._GetWPRArchivePath(test))
+
+  def testIsRenderTest_matchedWithKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['RenderTest', 'dummy']
+            }
+        },
+        'class': 'DummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertTrue(local_device_instrumentation_test_run._IsRenderTest(test))
+
+  def testIsRenderTest_noMatchedKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['abc', 'dummy']
+            }
+        },
+        'class': 'DummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertFalse(local_device_instrumentation_test_run._IsRenderTest(test))
+
+  def testReplaceUncommonChars(self):
+    original = 'abc#edf'
+    self.assertEqual(
+        local_device_instrumentation_test_run._ReplaceUncommonChars(original),
+        'abc__edf')
+    original = 'abc#edf#hhf'
+    self.assertEqual(
+        local_device_instrumentation_test_run._ReplaceUncommonChars(original),
+        'abc__edf__hhf')
+    original = 'abcedfhhf'
+    self.assertEqual(
+        local_device_instrumentation_test_run._ReplaceUncommonChars(original),
+        'abcedfhhf')
+    original = None
+    with self.assertRaises(ValueError):
+      local_device_instrumentation_test_run._ReplaceUncommonChars(original)
+    original = ''
+    with self.assertRaises(ValueError):
+      local_device_instrumentation_test_run._ReplaceUncommonChars(original)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/local/device/local_device_monkey_test_run.py b/src/build/android/pylib/local/device/local_device_monkey_test_run.py
new file mode 100644
index 0000000..f0d2339
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_monkey_test_run.py
@@ -0,0 +1,128 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import logging
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil.android import device_errors
+from devil.android.sdk import intent
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.local.device import local_device_test_run
+
+
+_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package
+
+class LocalDeviceMonkeyTestRun(local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    super(LocalDeviceMonkeyTestRun, self).__init__(env, test_instance)
+
+  def TestPackage(self):
+    return 'monkey'
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def _RunTest(self, device, test):
+    device.ClearApplicationState(self._test_instance.package)
+
+    # Chrome crashes are not always caught by Monkey test runner.
+    # Launch Chrome and verify Chrome has the same PID before and after
+    # the test.
+    device.StartActivity(
+        intent.Intent(package=self._test_instance.package,
+                      activity=self._test_instance.activity,
+                      action='android.intent.action.MAIN'),
+        blocking=True, force_stop=True)
+    before_pids = device.GetPids(self._test_instance.package)
+
+    output = ''
+    if before_pids:
+      if len(before_pids.get(self._test_instance.package, [])) > 1:
+        raise Exception(
+            'At most one instance of process %s expected but found pids: '
+            '%s' % (self._test_instance.package, before_pids))
+      output = '\n'.join(self._LaunchMonkeyTest(device))
+      after_pids = device.GetPids(self._test_instance.package)
+
+    crashed = True
+    if not self._test_instance.package in before_pids:
+      logging.error('Failed to start the process.')
+    elif not self._test_instance.package in after_pids:
+      logging.error('Process %s has died.',
+                    before_pids[self._test_instance.package])
+    elif (before_pids[self._test_instance.package] !=
+          after_pids[self._test_instance.package]):
+      logging.error('Detected process restart %s -> %s',
+                    before_pids[self._test_instance.package],
+                    after_pids[self._test_instance.package])
+    else:
+      crashed = False
+
+    success_pattern = 'Events injected: %d' % self._test_instance.event_count
+    if success_pattern in output and not crashed:
+      result = base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.PASS, log=output)
+    else:
+      result = base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.FAIL, log=output)
+      if 'chrome' in self._test_instance.package:
+        logging.warning('Starting MinidumpUploadService...')
+        # TODO(jbudorick): Update this after upstreaming.
+        minidump_intent = intent.Intent(
+            action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
+            package=self._test_instance.package,
+            activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
+        try:
+          device.RunShellCommand(
+              ['am', 'startservice'] + minidump_intent.am_args,
+              as_root=True, check_return=True)
+        except device_errors.CommandFailedError:
+          logging.exception('Failed to start MinidumpUploadService')
+
+    return result, None
+
+  #override
+  def TearDown(self):
+    pass
+
+  #override
+  def _CreateShards(self, tests):
+    return tests
+
+  #override
+  def _ShouldShard(self):
+    # TODO(mikecase): Run Monkey test concurrently on each attached device.
+    return False
+
+  #override
+  def _GetTests(self):
+    return ['MonkeyTest']
+
+  def _LaunchMonkeyTest(self, device):
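+    # Builds and runs an `adb shell monkey ...` invocation; an illustrative
+    # expansion (the package name and numeric values are hypothetical):
+    #   monkey -p org.chromium.chrome --throttle 100 -s 1337 \
+    #     --monitor-native-crashes --kill-process-after-error -v 10000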
+    try:
+      cmd = ['monkey',
+             '-p', self._test_instance.package,
+             '--throttle', str(self._test_instance.throttle),
+             '-s', str(self._test_instance.seed),
+             '--monitor-native-crashes',
+             '--kill-process-after-error']
+      for category in self._test_instance.categories:
+        cmd.extend(['-c', category])
+      for _ in range(self._test_instance.verbose_count):
+        cmd.append('-v')
+      cmd.append(str(self._test_instance.event_count))
+      return device.RunShellCommand(
+          cmd, timeout=self._test_instance.timeout, check_return=True)
+    finally:
+      try:
+        # Kill the monkey test process on the device. If you manually
+        # interrupt the test run, this will prevent the monkey test from
+        # continuing to run.
+        device.KillAll('com.android.commands.monkey')
+      except device_errors.CommandFailedError:
+        pass
diff --git a/src/build/android/pylib/local/device/local_device_test_run.py b/src/build/android/pylib/local/device/local_device_test_run.py
new file mode 100644
index 0000000..6fa0af7
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_test_run.py
@@ -0,0 +1,394 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import logging
+import posixpath
+import signal
+import thread
+import threading
+
+from devil import base_error
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android.sdk import version_codes
+from devil.android.tools import device_recovery
+from devil.utils import signal_handler
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.base import test_collection
+from pylib.local.device import local_device_environment
+
+
+_SIGTERM_TEST_LOG = (
+  '  Suite execution terminated, probably due to swarming timeout.\n'
+  '  Your test may not have run.')
+
+
+def SubstituteDeviceRoot(device_path, device_root):
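+  """Returns a device path, substituting |device_root| for empty components.
+
+  Illustrative behavior (mirrored by the unit tests in
+  local_device_test_run_test.py):
+    SubstituteDeviceRoot(None, '/sdcard') -> '/sdcard'
+    SubstituteDeviceRoot([None, 'foo'], '/sdcard') -> '/sdcard/foo'
+    SubstituteDeviceRoot('/data/local/tmp', '/sdcard') -> '/data/local/tmp'
+  """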
+  if not device_path:
+    return device_root
+  elif isinstance(device_path, list):
+    return posixpath.join(*(p if p else device_root for p in device_path))
+  else:
+    return device_path
+
+
+class TestsTerminated(Exception):
+  pass
+
+
+class InvalidShardingSettings(Exception):
+  def __init__(self, shard_index, total_shards):
+    super(InvalidShardingSettings, self).__init__(
+        'Invalid sharding settings. shard_index: %d total_shards: %d'
+            % (shard_index, total_shards))
+
+
+class LocalDeviceTestRun(test_run.TestRun):
+
+  def __init__(self, env, test_instance):
+    super(LocalDeviceTestRun, self).__init__(env, test_instance)
+    self._tools = {}
+    # This is intended to be filled by a child class.
+    self._installed_packages = []
+    env.SetPreferredAbis(test_instance.GetPreferredAbis())
+
+  #override
+  def RunTests(self, results):
+    tests = self._GetTests()
+
+    exit_now = threading.Event()
+
+    @local_device_environment.handle_shard_failures
+    def run_tests_on_device(dev, tests, results):
+      # This is performed here instead of during setup because restarting
+      # the device, which happens if a device needs to be recovered, clears
+      # app compatibility flags.
+      SetAppCompatibilityFlagsIfNecessary(self._installed_packages, dev)
+      consecutive_device_errors = 0
+      for test in tests:
+        if not test:
+          logging.warning('No tests in shard. Continuing.')
+          tests.test_completed()
+          continue
+        if exit_now.isSet():
+          thread.exit()
+
+        result = None
+        rerun = None
+        try:
+          result, rerun = crash_handler.RetryOnSystemCrash(
+              lambda d, t=test: self._RunTest(d, t),
+              device=dev)
+          consecutive_device_errors = 0
+          if isinstance(result, base_test_result.BaseTestResult):
+            results.AddResult(result)
+          elif isinstance(result, list):
+            results.AddResults(result)
+          else:
+            raise Exception(
+                'Unexpected result type: %s' % type(result).__name__)
+        except device_errors.CommandTimeoutError:
+          # Test timeouts don't count as device errors for the purpose
+          # of bad device detection.
+          consecutive_device_errors = 0
+
+          if isinstance(test, list):
+            results.AddResults(
+                base_test_result.BaseTestResult(
+                    self._GetUniqueTestName(t),
+                    base_test_result.ResultType.TIMEOUT)
+                for t in test)
+          else:
+            results.AddResult(
+                base_test_result.BaseTestResult(
+                    self._GetUniqueTestName(test),
+                    base_test_result.ResultType.TIMEOUT))
+        except Exception as e:  # pylint: disable=broad-except
+          if isinstance(tests, test_collection.TestCollection):
+            rerun = test
+          if (isinstance(e, device_errors.DeviceUnreachableError)
+              or not isinstance(e, base_error.BaseError)):
+            # If we get a device error but believe the device is still
+            # reachable, attempt to continue using it. Otherwise, raise
+            # the exception and terminate this run_tests_on_device call.
+            raise
+
+          consecutive_device_errors += 1
+          if consecutive_device_errors >= 3:
+            # We believe the device is still reachable and may still be usable,
+            # but if it fails repeatedly, we shouldn't attempt to keep using
+            # it.
+            logging.error('Repeated failures on device %s. Abandoning.',
+                          str(dev))
+            raise
+
+          logging.exception(
+              'Attempting to continue using device %s despite failure (%d/3).',
+              str(dev), consecutive_device_errors)
+
+        finally:
+          if isinstance(tests, test_collection.TestCollection):
+            if rerun:
+              tests.add(rerun)
+            tests.test_completed()
+
+      logging.info('Finished running tests on this device.')
+
+    def stop_tests(_signum, _frame):
+      logging.critical('Received SIGTERM. Stopping test execution.')
+      exit_now.set()
+      raise TestsTerminated()
+
+    try:
+      with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests):
+        self._env.ResetCurrentTry()
+        while self._env.current_try < self._env.max_tries and tests:
+          tries = self._env.current_try
+          grouped_tests = self._GroupTests(tests)
+          logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries)
+          if tries > 0 and self._env.recover_devices:
+            if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1
+                   for d in self._env.devices):
+              logging.info(
+                  'Attempting to recover devices due to known issue on L MR1. '
+                  'See crbug.com/787056 for details.')
+              self._env.parallel_devices.pMap(
+                  device_recovery.RecoverDevice, None)
+            elif tries + 1 == self._env.max_tries:
+              logging.info(
+                  'Attempting to recover devices prior to last test attempt.')
+              self._env.parallel_devices.pMap(
+                  device_recovery.RecoverDevice, None)
+          logging.info('Will run %d tests on %d devices: %s',
+                       len(tests), len(self._env.devices),
+                       ', '.join(str(d) for d in self._env.devices))
+          for t in tests:
+            logging.debug('  %s', t)
+
+          try_results = base_test_result.TestRunResults()
+          test_names = (self._GetUniqueTestName(t) for t in tests)
+          try_results.AddResults(
+              base_test_result.BaseTestResult(
+                  t, base_test_result.ResultType.NOTRUN)
+              for t in test_names if not t.endswith('*'))
+
+          # As soon as we know the names of the tests, we populate |results|.
+          # The tests in try_results will have their results updated by
+          # try_results.AddResult() as they are run.
+          results.append(try_results)
+
+          try:
+            if self._ShouldShard():
+              tc = test_collection.TestCollection(
+                  self._CreateShards(grouped_tests))
+              self._env.parallel_devices.pMap(
+                  run_tests_on_device, tc, try_results).pGet(None)
+            else:
+              self._env.parallel_devices.pMap(run_tests_on_device,
+                                              grouped_tests,
+                                              try_results).pGet(None)
+          except TestsTerminated:
+            for unknown_result in try_results.GetUnknown():
+              try_results.AddResult(
+                  base_test_result.BaseTestResult(
+                      unknown_result.GetName(),
+                      base_test_result.ResultType.TIMEOUT,
+                      log=_SIGTERM_TEST_LOG))
+            raise
+
+          self._env.IncrementCurrentTry()
+          tests = self._GetTestsToRetry(tests, try_results)
+
+          logging.info('FINISHED TRY #%d/%d', tries + 1, self._env.max_tries)
+          if tests:
+            logging.info('%d failed tests remain.', len(tests))
+          else:
+            logging.info('All tests completed.')
+    except TestsTerminated:
+      pass
+
+  def _GetTestsToRetry(self, tests, try_results):
+
+    def is_failure_result(test_result):
+      if isinstance(test_result, list):
+        return any(is_failure_result(r) for r in test_result)
+      return (
+          test_result is None
+          or test_result.GetType() not in (
+              base_test_result.ResultType.PASS,
+              base_test_result.ResultType.SKIP))
+
+    all_test_results = {r.GetName(): r for r in try_results.GetAll()}
+
+    tests_and_names = ((t, self._GetUniqueTestName(t)) for t in tests)
+
+    tests_and_results = {}
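+    # A trailing '*' marks a gtest-style wildcard filter (e.g. 'TestCase.*');
+    # such a test is retried if any result whose name matches the pattern
+    # failed.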
+    for test, name in tests_and_names:
+      if name.endswith('*'):
+        tests_and_results[name] = (
+            test,
+            [r for n, r in all_test_results.iteritems()
+             if fnmatch.fnmatch(n, name)])
+      else:
+        tests_and_results[name] = (test, all_test_results.get(name))
+
+    failed_tests_and_results = (
+        (test, result) for test, result in tests_and_results.itervalues()
+        if is_failure_result(result)
+    )
+
+    return [t for t, r in failed_tests_and_results if self._ShouldRetry(t, r)]
+
+  def _ApplyExternalSharding(self, tests, shard_index, total_shards):
+    logging.info('Using external sharding settings. This is shard %d/%d',
+                 shard_index, total_shards)
+
+    if total_shards < 0 or shard_index < 0 or total_shards <= shard_index:
+      raise InvalidShardingSettings(shard_index, total_shards)
+
+    sharded_tests = []
+
+    # Group tests by tests that should run in the same test invocation - either
+    # unit tests or batched tests.
+    grouped_tests = self._GroupTests(tests)
+
+    # Partition grouped tests approximately evenly across shards.
+    partitioned_tests = self._PartitionTests(grouped_tests, total_shards,
+                                             float('inf'))
+    if len(partitioned_tests) <= shard_index:
+      return []
+    for t in partitioned_tests[shard_index]:
+      if isinstance(t, list):
+        sharded_tests.extend(t)
+      else:
+        sharded_tests.append(t)
+    return sharded_tests
+
+  # Partition tests evenly into |num_desired_partitions| partitions where
+  # possible. However, several constraints make perfect partitioning
+  # impossible. If |max_partition_size| isn't large enough, extra partitions
+  # may be created (an infinite max size should always return precisely the
+  # desired number of partitions). Even if |max_partition_size| is technically
+  # large enough to hold all of the tests in |num_desired_partitions|, we
+  # attempt to keep test order relatively stable to minimize flakes, so when
+  # tests are grouped (e.g. batched tests), we cannot perfectly fill all
+  # partitions, as that would require breaking up groups.
+  def _PartitionTests(self, tests, num_desired_partitions, max_partition_size):
+    # pylint: disable=no-self-use
+    partitions = []
+
+    # Sort by hash so we don't put all tests in a slow suite in the same
+    # partition.
+    tests = sorted(
+        tests,
+        key=lambda t: hash(
+            self._GetUniqueTestName(t[0] if isinstance(t, list) else t)))
+
+    def CountTestsIndividually(test):
+      if not isinstance(test, list):
+        return False
+      annotations = test[0]['annotations']
+      # UnitTests tests are really fast, so to balance shards better, count
+      # UnitTests Batches as single tests.
+      return ('Batch' not in annotations
+              or annotations['Batch']['value'] != 'UnitTests')
+
+    num_not_yet_allocated = sum(
+        [len(test) - 1 for test in tests if CountTestsIndividually(test)])
+    num_not_yet_allocated += len(tests)
+
+    # Fast linear partition approximation capped by max_partition_size. We
+    # cannot round-robin or otherwise re-order tests dynamically because we want
+    # test order to remain stable.
+    partition_size = min(num_not_yet_allocated // num_desired_partitions,
+                         max_partition_size)
+    partitions.append([])
+    last_partition_size = 0
+    for test in tests:
+      test_count = len(test) if CountTestsIndividually(test) else 1
+      num_not_yet_allocated -= test_count
+      # Make a new shard whenever we would overfill the previous one. However,
+      # if the size of the test group is larger than the max partition size on
+      # its own, just put the group in its own shard instead of splitting up the
+      # group.
+      if (last_partition_size + test_count > partition_size
+          and last_partition_size > 0):
+        num_desired_partitions -= 1
+        partitions.append([])
+        partitions[-1].append(test)
+        last_partition_size = test_count
+        if num_desired_partitions <= 0:
+          # Too many tests for number of partitions, just fill all partitions
+          # beyond num_desired_partitions.
+          partition_size = max_partition_size
+        else:
+          # Re-balance remaining partitions.
+          partition_size = min(num_not_yet_allocated // num_desired_partitions,
+                               max_partition_size)
+      else:
+        partitions[-1].append(test)
+        last_partition_size += test_count
+
+    if not partitions[-1]:
+      partitions.pop()
+    return partitions
+
+  def GetTool(self, device):
+    if str(device) not in self._tools:
+      self._tools[str(device)] = valgrind_tools.CreateTool(
+          self._env.tool, device)
+    return self._tools[str(device)]
+
+  def _CreateShards(self, tests):
+    raise NotImplementedError
+
+  def _GetUniqueTestName(self, test):
+    # pylint: disable=no-self-use
+    return test
+
+  def _ShouldRetry(self, test, result):
+    # pylint: disable=no-self-use,unused-argument
+    return True
+
+  def _GetTests(self):
+    raise NotImplementedError
+
+  def _GroupTests(self, tests):
+    # pylint: disable=no-self-use
+    return tests
+
+  def _RunTest(self, device, test):
+    raise NotImplementedError
+
+  def _ShouldShard(self):
+    raise NotImplementedError
+
+
+def SetAppCompatibilityFlagsIfNecessary(packages, device):
+  """Sets app compatibility flags on the given packages and device.
+
+  Args:
+    packages: A list of strings containing package names to apply flags to.
+    device: A DeviceUtils instance to apply the flags on.
+  """
+
+  def set_flag_for_packages(flag, enable):
+    enable_str = 'enable' if enable else 'disable'
+    for p in packages:
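+      # e.g. `am compat disable DEFAULT_SCOPED_STORAGE org.chromium.chrome`
+      # (the package name here is illustrative).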
+      cmd = ['am', 'compat', enable_str, flag, p]
+      device.RunShellCommand(cmd)
+
+  sdk_version = device.build_version_sdk
+  if sdk_version >= version_codes.R:
+    # These flags are necessary to use the legacy storage permissions on R+.
+    # See crbug.com/1173699 for more information.
+    set_flag_for_packages('DEFAULT_SCOPED_STORAGE', False)
+    set_flag_for_packages('FORCE_ENABLE_SCOPED_STORAGE', False)
+
+
+class NoTestsError(Exception):
+  """Error for when no tests are found."""
diff --git a/src/build/android/pylib/local/device/local_device_test_run_test.py b/src/build/android/pylib/local/device/local_device_test_run_test.py
new file mode 100755
index 0000000..77bbc2e
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_test_run_test.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import unittest
+
+from pylib.base import base_test_result
+from pylib.local.device import local_device_test_run
+
+import mock  # pylint: disable=import-error
+
+
+class SubstituteDeviceRootTest(unittest.TestCase):
+
+  def testNoneDevicePath(self):
+    self.assertEquals(
+        '/fake/device/root',
+        local_device_test_run.SubstituteDeviceRoot(
+            None, '/fake/device/root'))
+
+  def testStringDevicePath(self):
+    self.assertEquals(
+        '/another/fake/device/path',
+        local_device_test_run.SubstituteDeviceRoot(
+            '/another/fake/device/path', '/fake/device/root'))
+
+  def testListWithNoneDevicePath(self):
+    self.assertEquals(
+        '/fake/device/root/subpath',
+        local_device_test_run.SubstituteDeviceRoot(
+            [None, 'subpath'], '/fake/device/root'))
+
+  def testListWithoutNoneDevicePath(self):
+    self.assertEquals(
+        '/another/fake/device/path',
+        local_device_test_run.SubstituteDeviceRoot(
+            ['/', 'another', 'fake', 'device', 'path'],
+            '/fake/device/root'))
+
+
+class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun):
+
+  # pylint: disable=abstract-method
+
+  def __init__(self):
+    super(TestLocalDeviceTestRun, self).__init__(
+        mock.MagicMock(), mock.MagicMock())
+
+
+class TestLocalDeviceNonStringTestRun(
+    local_device_test_run.LocalDeviceTestRun):
+
+  # pylint: disable=abstract-method
+
+  def __init__(self):
+    super(TestLocalDeviceNonStringTestRun, self).__init__(
+        mock.MagicMock(), mock.MagicMock())
+
+  def _GetUniqueTestName(self, test):
+    return test['name']
+
+
+class LocalDeviceTestRunTest(unittest.TestCase):
+
+  def testGetTestsToRetry_allTestsPassed(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = [r.GetName() for r in results]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(0, len(tests_to_retry))
+
+  def testGetTestsToRetry_testFailed(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'Test1', base_test_result.ResultType.FAIL),
+        base_test_result.BaseTestResult(
+            'Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = [r.GetName() for r in results]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIn('Test1', tests_to_retry)
+
+  def testGetTestsToRetry_testUnknown(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = ['Test1'] + [r.GetName() for r in results]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIn('Test1', tests_to_retry)
+
+  def testGetTestsToRetry_wildcardFilter_allPass(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'TestCase.Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'TestCase.Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = ['TestCase.*']
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(0, len(tests_to_retry))
+
+  def testGetTestsToRetry_wildcardFilter_oneFails(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'TestCase.Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'TestCase.Test2', base_test_result.ResultType.FAIL),
+    ]
+
+    tests = ['TestCase.*']
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIn('TestCase.*', tests_to_retry)
+
+  def testGetTestsToRetry_nonStringTests(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'TestCase.Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'TestCase.Test2', base_test_result.ResultType.FAIL),
+    ]
+
+    tests = [
+        {'name': 'TestCase.Test1'},
+        {'name': 'TestCase.Test2'},
+    ]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceNonStringTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIsInstance(tests_to_retry[0], dict)
+    self.assertEquals(tests[1], tests_to_retry[0])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/local/emulator/__init__.py b/src/build/android/pylib/local/emulator/__init__.py
new file mode 100644
index 0000000..4a12e35
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/emulator/avd.py b/src/build/android/pylib/local/emulator/avd.py
new file mode 100644
index 0000000..51365eb
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/avd.py
@@ -0,0 +1,606 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import json
+import logging
+import os
+import socket
+import stat
+import subprocess
+import threading
+
+from google.protobuf import text_format  # pylint: disable=import-error
+
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+from devil.utils import cmd_helper
+from devil.utils import timeout_retry
+from py_utils import tempfile_ext
+from pylib import constants
+from pylib.local.emulator import ini
+from pylib.local.emulator.proto import avd_pb2
+
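+# Sentinel meaning "operate on every package in the AVD config".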
+_ALL_PACKAGES = object()
+_DEFAULT_AVDMANAGER_PATH = os.path.join(
+    constants.ANDROID_SDK_ROOT, 'cmdline-tools', 'latest', 'bin', 'avdmanager')
+# Default to a 480dp mdpi screen (a relatively large phone).
+# See https://developer.android.com/training/multiscreen/screensizes
+# and https://developer.android.com/training/multiscreen/screendensities
+# for more information.
+_DEFAULT_SCREEN_DENSITY = 160
+_DEFAULT_SCREEN_HEIGHT = 960
+_DEFAULT_SCREEN_WIDTH = 480
+
+
+class AvdException(Exception):
+  """Raised when this module has a problem interacting with an AVD."""
+
+  def __init__(self, summary, command=None, stdout=None, stderr=None):
+    message_parts = [summary]
+    if command:
+      message_parts.append('  command: %s' % ' '.join(command))
+    if stdout:
+      message_parts.append('  stdout:')
+      message_parts.extend('    %s' % line for line in stdout.splitlines())
+    if stderr:
+      message_parts.append('  stderr:')
+      message_parts.extend('    %s' % line for line in stderr.splitlines())
+
+    super(AvdException, self).__init__('\n'.join(message_parts))
+
+
+def _Load(avd_proto_path):
+  """Loads an Avd proto from a textpb file at the given path.
+
+  Should not be called outside of this module.
+
+  Args:
+    avd_proto_path: path to a textpb file containing an Avd message.
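+
+  A minimal illustrative Avd message (values here are hypothetical):
+
+    avd_name: "generic_android28"
+    system_image_name: "system-images;android-28;google_apis;x86"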
+  """
+  with open(avd_proto_path) as avd_proto_file:
+    return text_format.Merge(avd_proto_file.read(), avd_pb2.Avd())
+
+
+class _AvdManagerAgent(object):
+  """Private utility for interacting with avdmanager."""
+
+  def __init__(self, avd_home, sdk_root):
+    """Create an _AvdManagerAgent.
+
+    Args:
+      avd_home: path to ANDROID_AVD_HOME directory.
+        Typically something like /path/to/dir/.android/avd
+      sdk_root: path to SDK root directory.
+    """
+    self._avd_home = avd_home
+    self._sdk_root = sdk_root
+
+    self._env = dict(os.environ)
+
+    # The avdmanager from cmdline-tools looks two levels up from toolsdir
+    # to find the SDK root, so pass it a fake directory under the directory
+    # in which we install the system images so that avdmanager can find
+    # them.
+    fake_tools_dir = os.path.join(self._sdk_root, 'non-existent-tools',
+                                  'non-existent-version')
+    self._env.update({
+        'ANDROID_AVD_HOME':
+        self._avd_home,
+        'AVDMANAGER_OPTS':
+        '-Dcom.android.sdkmanager.toolsdir=%s' % fake_tools_dir,
+    })
+
+  def Create(self, avd_name, system_image, force=False):
+    """Call `avdmanager create`.
+
+    Args:
+      avd_name: name of the AVD to create.
+      system_image: system image to use for the AVD.
+      force: whether to force creation, overwriting any existing
+        AVD with the same name.
+    """
+    create_cmd = [
+        _DEFAULT_AVDMANAGER_PATH,
+        '-v',
+        'create',
+        'avd',
+        '-n',
+        avd_name,
+        '-k',
+        system_image,
+    ]
+    if force:
+      create_cmd += ['--force']
+
+    create_proc = cmd_helper.Popen(
+        create_cmd,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        env=self._env)
+    output, error = create_proc.communicate(input='\n')
+    if create_proc.returncode != 0:
+      raise AvdException(
+          'AVD creation failed',
+          command=create_cmd,
+          stdout=output,
+          stderr=error)
+
+    for line in output.splitlines():
+      logging.info('  %s', line)
+
+  def Delete(self, avd_name):
+    """Call `avdmanager delete`.
+
+    Args:
+      avd_name: name of the AVD to delete.
+    """
+    delete_cmd = [
+        _DEFAULT_AVDMANAGER_PATH,
+        '-v',
+        'delete',
+        'avd',
+        '-n',
+        avd_name,
+    ]
+    try:
+      for line in cmd_helper.IterCmdOutputLines(delete_cmd, env=self._env):
+        logging.info('  %s', line)
+    except subprocess.CalledProcessError as e:
+      raise AvdException('AVD deletion failed: %s' % str(e), command=delete_cmd)
+
+
+class AvdConfig(object):
+  """Represents a particular AVD configuration.
+
+  This class supports creation, installation, and execution of an AVD
+  from a given Avd proto message, as defined in
+  //build/android/pylib/local/emulator/proto/avd.proto.
+  """
+
+  def __init__(self, avd_proto_path):
+    """Create an AvdConfig object.
+
+    Args:
+      avd_proto_path: path to a textpb file containing an Avd message.
+    """
+    self._config = _Load(avd_proto_path)
+
+    self._emulator_home = os.path.join(constants.DIR_SOURCE_ROOT,
+                                       self._config.avd_package.dest_path)
+    self._emulator_sdk_root = os.path.join(
+        constants.DIR_SOURCE_ROOT, self._config.emulator_package.dest_path)
+    self._emulator_path = os.path.join(self._emulator_sdk_root, 'emulator',
+                                       'emulator')
+
+    self._initialized = False
+    self._initializer_lock = threading.Lock()
+
+  @property
+  def avd_settings(self):
+    return self._config.avd_settings
+
+  def Create(self,
+             force=False,
+             snapshot=False,
+             keep=False,
+             cipd_json_output=None,
+             dry_run=False):
+    """Create an instance of the AVD CIPD package.
+
+    This method:
+     - installs the requisite system image
+     - creates the AVD
+     - modifies the AVD's ini files to support running chromium tests
+       in chromium infrastructure
+     - optionally starts & stops the AVD for snapshotting (default no)
+     - By default creates and uploads an instance of the AVD CIPD package
+       (can be turned off by dry_run flag).
+     - optionally deletes the AVD (default yes)
+
+    Args:
+      force: bool indicating whether to force create the AVD.
+      snapshot: bool indicating whether to snapshot the AVD before creating
+        the CIPD package.
+      keep: bool indicating whether to keep the AVD after creating
+        the CIPD package.
+      cipd_json_output: string path to pass to `cipd create` via -json-output.
+      dry_run: When set to True, it will skip the CIPD package creation
+        after creating the AVD.
+    """
+    logging.info('Installing required packages.')
+    self._InstallCipdPackages(packages=[
+        self._config.emulator_package,
+        self._config.system_image_package,
+    ])
+
+    android_avd_home = os.path.join(self._emulator_home, 'avd')
+
+    if not os.path.exists(android_avd_home):
+      os.makedirs(android_avd_home)
+
+    avd_manager = _AvdManagerAgent(
+        avd_home=android_avd_home, sdk_root=self._emulator_sdk_root)
+
+    logging.info('Creating AVD.')
+    avd_manager.Create(
+        avd_name=self._config.avd_name,
+        system_image=self._config.system_image_name,
+        force=force)
+
+    try:
+      logging.info('Modifying AVD configuration.')
+
+      # Clear out any previous configuration or state from this AVD.
+      root_ini = os.path.join(android_avd_home,
+                              '%s.ini' % self._config.avd_name)
+      features_ini = os.path.join(self._emulator_home, 'advancedFeatures.ini')
+      avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name)
+      config_ini = os.path.join(avd_dir, 'config.ini')
+
+      with ini.update_ini_file(root_ini) as root_ini_contents:
+        root_ini_contents['path.rel'] = 'avd/%s.avd' % self._config.avd_name
+
+      with ini.update_ini_file(features_ini) as features_ini_contents:
+        # The features_ini file is not refreshed by avdmanager during
+        # creation, so explicitly clear its contents to remove any
+        # leftovers from a previous creation.
+        features_ini_contents.clear()
+        features_ini_contents.update(self.avd_settings.advanced_features)
+
+      with ini.update_ini_file(config_ini) as config_ini_contents:
+        height = self.avd_settings.screen.height or _DEFAULT_SCREEN_HEIGHT
+        width = self.avd_settings.screen.width or _DEFAULT_SCREEN_WIDTH
+        density = self.avd_settings.screen.density or _DEFAULT_SCREEN_DENSITY
+
+        config_ini_contents.update({
+            'disk.dataPartition.size': '4G',
+            'hw.keyboard': 'yes',
+            'hw.lcd.density': density,
+            'hw.lcd.height': height,
+            'hw.lcd.width': width,
+        })
+
+        if self.avd_settings.ram_size:
+          config_ini_contents['hw.ramSize'] = self.avd_settings.ram_size
+
+      # Start & stop the AVD.
+      self._Initialize()
+      instance = _AvdInstance(self._emulator_path, self._emulator_home,
+                              self._config)
+      # Enable debug output for snapshotting when |snapshot| is True.
+      debug_tags = 'init,snapshot' if snapshot else None
+      instance.Start(
+          read_only=False, snapshot_save=snapshot, debug_tags=debug_tags)
+      # Android devices with full-disk encryption are encrypted on first boot,
+      # and then get decrypted to continue the boot process (see details in
+      # https://bit.ly/3agmjcM).
+      # Wait for this step to complete, since it can take a while on old OS
+      # versions like M; otherwise the AVD may show an "Encryption
+      # Unsuccessful" error.
+      device_utils.DeviceUtils(instance.serial).WaitUntilFullyBooted(
+          decrypt=True, timeout=180, retries=0)
+      instance.Stop()
+
+      # The multiinstance lock file seems to interfere with the emulator's
+      # operation in some circumstances (beyond the obvious -read-only ones),
+      # and there seems to be no mechanism by which it gets closed or deleted.
+      # See https://bit.ly/2pWQTH7 for context.
+      multiInstanceLockFile = os.path.join(avd_dir, 'multiinstance.lock')
+      if os.path.exists(multiInstanceLockFile):
+        os.unlink(multiInstanceLockFile)
+
+      package_def_content = {
+          'package':
+          self._config.avd_package.package_name,
+          'root':
+          self._emulator_home,
+          'install_mode':
+          'copy',
+          'data': [{
+              'dir': os.path.relpath(avd_dir, self._emulator_home)
+          }, {
+              'file': os.path.relpath(root_ini, self._emulator_home)
+          }, {
+              'file': os.path.relpath(features_ini, self._emulator_home)
+          }],
+      }
+
+      logging.info('Creating AVD CIPD package.')
+      logging.debug('ensure file content: %s',
+                    json.dumps(package_def_content, indent=2))
+
+      with tempfile_ext.TemporaryFileName(suffix='.json') as package_def_path:
+        with open(package_def_path, 'w') as package_def_file:
+          json.dump(package_def_content, package_def_file)
+
+        logging.info('  %s', self._config.avd_package.package_name)
+        cipd_create_cmd = [
+            'cipd',
+            'create',
+            '-pkg-def',
+            package_def_path,
+            '-tag',
+            'emulator_version:%s' % self._config.emulator_package.version,
+            '-tag',
+            'system_image_version:%s' %
+            self._config.system_image_package.version,
+        ]
+        if cipd_json_output:
+          cipd_create_cmd.extend([
+              '-json-output',
+              cipd_json_output,
+          ])
+        logging.info('running %r%s', cipd_create_cmd,
+                     ' (dry_run)' if dry_run else '')
+        if not dry_run:
+          try:
+            for line in cmd_helper.IterCmdOutputLines(cipd_create_cmd):
+              logging.info('    %s', line)
+          except subprocess.CalledProcessError as e:
+            raise AvdException(
+                'CIPD package creation failed: %s' % str(e),
+                command=cipd_create_cmd)
+
+    finally:
+      if not keep:
+        logging.info('Deleting AVD.')
+        avd_manager.Delete(avd_name=self._config.avd_name)
+
+  def Install(self, packages=_ALL_PACKAGES):
+    """Installs the requested CIPD packages and prepares them for use.
+
+    This includes making files writeable and revising some of the
+    emulator's internal config files.
+
+    Returns: None
+    Raises: AvdException on failure to install.
+    """
+    self._InstallCipdPackages(packages=packages)
+    self._MakeWriteable()
+    self._EditConfigs()
+
+  def _InstallCipdPackages(self, packages):
+    pkgs_by_dir = {}
+    if packages is _ALL_PACKAGES:
+      packages = [
+          self._config.avd_package,
+          self._config.emulator_package,
+          self._config.system_image_package,
+      ]
+    for pkg in packages:
+      if not pkg.dest_path in pkgs_by_dir:
+        pkgs_by_dir[pkg.dest_path] = []
+      pkgs_by_dir[pkg.dest_path].append(pkg)
+
+    for pkg_dir, pkgs in pkgs_by_dir.items():
+      logging.info('Installing packages in %s', pkg_dir)
+      cipd_root = os.path.join(constants.DIR_SOURCE_ROOT, pkg_dir)
+      if not os.path.exists(cipd_root):
+        os.makedirs(cipd_root)
+      ensure_path = os.path.join(cipd_root, '.ensure')
+      with open(ensure_path, 'w') as ensure_file:
+        # Make CIPD ensure that all files are present and correct,
+        # even if it thinks the package is installed.
+        ensure_file.write('$ParanoidMode CheckIntegrity\n\n')
+        for pkg in pkgs:
+          ensure_file.write('%s %s\n' % (pkg.package_name, pkg.version))
+          logging.info('  %s %s', pkg.package_name, pkg.version)
+      ensure_cmd = [
+          'cipd',
+          'ensure',
+          '-ensure-file',
+          ensure_path,
+          '-root',
+          cipd_root,
+      ]
+      try:
+        for line in cmd_helper.IterCmdOutputLines(ensure_cmd):
+          logging.info('    %s', line)
+      except subprocess.CalledProcessError as e:
+        raise AvdException(
+            'Failed to install CIPD package %s: %s' % (pkg.package_name,
+                                                       str(e)),
+            command=ensure_cmd)
+
+  def _MakeWriteable(self):
+    # The emulator requires that some files are writable.
+    for dirname, _, filenames in os.walk(self._emulator_home):
+      for f in filenames:
+        path = os.path.join(dirname, f)
+        mode = os.lstat(path).st_mode
+        if mode & stat.S_IRUSR:
+          mode = mode | stat.S_IWUSR
+        os.chmod(path, mode)
+
+  def _EditConfigs(self):
+    android_avd_home = os.path.join(self._emulator_home, 'avd')
+    avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name)
+
+    config_path = os.path.join(avd_dir, 'config.ini')
+    if os.path.exists(config_path):
+      with open(config_path) as config_file:
+        config_contents = ini.load(config_file)
+    else:
+      config_contents = {}
+
+    config_contents['hw.sdCard'] = 'true'
+    if self.avd_settings.sdcard.size:
+      sdcard_path = os.path.join(avd_dir, 'cr-sdcard.img')
+      if not os.path.exists(sdcard_path):
+        mksdcard_path = os.path.join(
+            os.path.dirname(self._emulator_path), 'mksdcard')
+        mksdcard_cmd = [
+            mksdcard_path,
+            self.avd_settings.sdcard.size,
+            sdcard_path,
+        ]
+        cmd_helper.RunCmd(mksdcard_cmd)
+
+      config_contents['hw.sdCard.path'] = sdcard_path
+
+    with open(config_path, 'w') as config_file:
+      ini.dump(config_contents, config_file)
+
+  def _Initialize(self):
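+    # Double-checked locking: the unguarded check keeps the common
+    # already-initialized path cheap, while the re-check under the lock
+    # makes first-time initialization race-free.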
+    if self._initialized:
+      return
+
+    with self._initializer_lock:
+      if self._initialized:
+        return
+
+      # Emulator start-up looks for the adb daemon. Make sure it's running.
+      adb_wrapper.AdbWrapper.StartServer()
+
+      # Emulator start-up tries to check for the SDK root by looking for
+      # platforms/ and platform-tools/. Ensure they exist.
+      # See http://bit.ly/2YAkyFE for context.
+      required_dirs = [
+          os.path.join(self._emulator_sdk_root, 'platforms'),
+          os.path.join(self._emulator_sdk_root, 'platform-tools'),
+      ]
+      for d in required_dirs:
+        if not os.path.exists(d):
+          os.makedirs(d)
+
+  def CreateInstance(self):
+    """Creates an AVD instance without starting it.
+
+    Returns:
+      An _AvdInstance.
+    """
+    self._Initialize()
+    return _AvdInstance(self._emulator_path, self._emulator_home, self._config)
+
+  def StartInstance(self):
+    """Starts an AVD instance.
+
+    Returns:
+      An _AvdInstance.
+    """
+    instance = self.CreateInstance()
+    instance.Start()
+    return instance
+
+
+class _AvdInstance(object):
+  """Represents a single running instance of an AVD.
+
+  Instances should only be created via AvdConfig.CreateInstance or
+  AvdConfig.StartInstance, but their other methods can be freely called.
+  """
+
+  def __init__(self, emulator_path, emulator_home, avd_config):
+    """Create an _AvdInstance object.
+
+    Args:
+      emulator_path: path to the emulator binary.
+      emulator_home: path to the emulator home directory.
+      avd_config: AVD config proto.
+    """
+    self._avd_config = avd_config
+    self._avd_name = avd_config.avd_name
+    self._emulator_home = emulator_home
+    self._emulator_path = emulator_path
+    self._emulator_proc = None
+    self._emulator_serial = None
+    self._sink = None
+
+  def __str__(self):
+    return '%s|%s' % (self._avd_name, (self._emulator_serial or id(self)))
+
+  def Start(self,
+            read_only=True,
+            snapshot_save=False,
+            window=False,
+            writable_system=False,
+            debug_tags=None):
+    """Starts the emulator running an instance of the given AVD."""
+
+    with tempfile_ext.TemporaryFileName() as socket_path, (contextlib.closing(
+        socket.socket(socket.AF_UNIX))) as sock:
+      sock.bind(socket_path)
+      emulator_cmd = [
+          self._emulator_path,
+          '-avd',
+          self._avd_name,
+          '-report-console',
+          'unix:%s' % socket_path,
+          '-no-boot-anim',
+          # Set the gpu mode to swiftshader_indirect; otherwise the AVD may
+          # exit with a "change of render" error when run in window mode.
+          '-gpu',
+          'swiftshader_indirect',
+      ]
+
+      if read_only:
+        emulator_cmd.append('-read-only')
+      if not snapshot_save:
+        emulator_cmd.append('-no-snapshot-save')
+      if writable_system:
+        emulator_cmd.append('-writable-system')
+      if debug_tags:
+        emulator_cmd.extend(['-debug', debug_tags])
+
+      emulator_env = {}
+      if self._emulator_home:
+        emulator_env['ANDROID_EMULATOR_HOME'] = self._emulator_home
+      if window:
+        if 'DISPLAY' in os.environ:
+          emulator_env['DISPLAY'] = os.environ.get('DISPLAY')
+        else:
+          raise AvdException('Emulator failed to start: DISPLAY not defined')
+      else:
+        emulator_cmd.append('-no-window')
+
+      sock.listen(1)
+
+      logging.info('Starting emulator with command: %s',
+                   ' '.join(emulator_cmd))
+
+      # TODO(jbudorick): Add support for logging emulator stdout & stderr at
+      # higher logging levels.
+      # Silence the emulator unless debug_tags is set; with debug_tags the
+      # emulator's log goes to this process's stdout/stderr.
+      if not debug_tags:
+        self._sink = open('/dev/null', 'w')
+      self._emulator_proc = cmd_helper.Popen(
+          emulator_cmd, stdout=self._sink, stderr=self._sink, env=emulator_env)
+
+      # Waits for the emulator to report its serial as requested via
+      # -report-console. See http://bit.ly/2lK3L18 for more.
+      def listen_for_serial(s):
+        logging.info('Waiting for connection from emulator.')
+        with contextlib.closing(s.accept()[0]) as conn:
+          val = conn.recv(1024)
+          return 'emulator-%d' % int(val)
+
+      try:
+        self._emulator_serial = timeout_retry.Run(
+            listen_for_serial, timeout=30, retries=0, args=[sock])
+        logging.info('%s started', self._emulator_serial)
+      except Exception as e:
+        self.Stop()
+        raise AvdException('Emulator failed to start: %s' % str(e))
+
+  def Stop(self):
+    """Stops the emulator process."""
+    if self._emulator_proc:
+      if self._emulator_proc.poll() is None:
+        if self._emulator_serial:
+          device_utils.DeviceUtils(self._emulator_serial).adb.Emu('kill')
+        else:
+          self._emulator_proc.terminate()
+        self._emulator_proc.wait()
+      self._emulator_proc = None
+
+    if self._sink:
+      self._sink.close()
+      self._sink = None
+
+  @property
+  def serial(self):
+    return self._emulator_serial
diff --git a/src/build/android/pylib/local/emulator/ini.py b/src/build/android/pylib/local/emulator/ini.py
new file mode 100644
index 0000000..8f16c33
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/ini.py
@@ -0,0 +1,58 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Basic .ini encoding and decoding."""
+
+from __future__ import absolute_import
+import contextlib
+import os
+
+
+def loads(ini_str, strict=True):
+  ret = {}
+  for line in ini_str.splitlines():
+    # Skip blank lines rather than crashing on the missing '='.
+    if not line.strip():
+      continue
+    key, val = line.split('=', 1)
+    key = key.strip()
+    val = val.strip()
+    if strict and key in ret:
+      raise ValueError('Multiple entries present for key "%s"' % key)
+    ret[key] = val
+
+  return ret
+
+
+def load(fp):
+  return loads(fp.read())
+
+
+def dumps(obj):
+  ret = ''
+  for k, v in sorted(obj.items()):
+    ret += '%s = %s\n' % (k, str(v))
+  return ret
+
+
+def dump(obj, fp):
+  fp.write(dumps(obj))
+
+
+@contextlib.contextmanager
+def update_ini_file(ini_file_path):
+  """Load and update the contents of an ini file.
+
+  Args:
+    ini_file_path: A string containing the absolute path of the ini file.
+  Yields:
+    The contents of the file, as a dict
+  """
+  if os.path.exists(ini_file_path):
+    with open(ini_file_path) as ini_file:
+      ini_contents = load(ini_file)
+  else:
+    ini_contents = {}
+
+  yield ini_contents
+
+  with open(ini_file_path, 'w') as ini_file:
+    dump(ini_contents, ini_file)
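+
+
+# A minimal usage sketch (illustrative; path and key are hypothetical):
+#
+#   with update_ini_file('/tmp/config.ini') as config:
+#     config['hw.sdCard'] = 'true'
+#
+# On exit from the with-block, the updated dict is written back to the file.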
diff --git a/src/build/android/pylib/local/emulator/ini_test.py b/src/build/android/pylib/local/emulator/ini_test.py
new file mode 100755
index 0000000..0cf9250
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/ini_test.py
@@ -0,0 +1,69 @@
+#! /usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for ini.py."""
+
+from __future__ import absolute_import
+import textwrap
+import unittest
+
+from pylib.local.emulator import ini
+
+
+class IniTest(unittest.TestCase):
+  def testLoadsBasic(self):
+    ini_str = textwrap.dedent("""\
+        foo.bar = 1
+        foo.baz= example
+        bar.bad =/path/to/thing
+        """)
+    expected = {
+        'foo.bar': '1',
+        'foo.baz': 'example',
+        'bar.bad': '/path/to/thing',
+    }
+    self.assertEqual(expected, ini.loads(ini_str))
+
+  def testLoadsStrictFailure(self):
+    ini_str = textwrap.dedent("""\
+        foo.bar = 1
+        foo.baz = example
+        bar.bad = /path/to/thing
+        foo.bar = duplicate
+        """)
+    with self.assertRaises(ValueError):
+      ini.loads(ini_str, strict=True)
+
+  def testLoadsPermissive(self):
+    ini_str = textwrap.dedent("""\
+        foo.bar = 1
+        foo.baz = example
+        bar.bad = /path/to/thing
+        foo.bar = duplicate
+        """)
+    expected = {
+        'foo.bar': 'duplicate',
+        'foo.baz': 'example',
+        'bar.bad': '/path/to/thing',
+    }
+    self.assertEqual(expected, ini.loads(ini_str, strict=False))
+
+  def testDumpsBasic(self):
+    ini_contents = {
+        'foo.bar': '1',
+        'foo.baz': 'example',
+        'bar.bad': '/path/to/thing',
+    }
+    # ini.dumps is expected to dump to string alphabetically
+    # by key.
+    expected = textwrap.dedent("""\
+        bar.bad = /path/to/thing
+        foo.bar = 1
+        foo.baz = example
+        """)
+    self.assertEqual(expected, ini.dumps(ini_contents))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/local/emulator/local_emulator_environment.py b/src/build/android/pylib/local/emulator/local_emulator_environment.py
new file mode 100644
index 0000000..1343d8c
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/local_emulator_environment.py
@@ -0,0 +1,102 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import logging
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil import base_error
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import parallelizer
+from devil.utils import reraiser_thread
+from devil.utils import timeout_retry
+from pylib.local.device import local_device_environment
+from pylib.local.emulator import avd
+
+# Mirroring https://bit.ly/2OjuxcS#23
+_MAX_ANDROID_EMULATORS = 16
+
+
+class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
+
+  def __init__(self, args, output_manager, error_func):
+    super(LocalEmulatorEnvironment, self).__init__(args, output_manager,
+                                                   error_func)
+    self._avd_config = avd.AvdConfig(args.avd_config)
+    if args.emulator_count < 1:
+      error_func('--emulator-count must be >= 1')
+    elif args.emulator_count >= _MAX_ANDROID_EMULATORS:
+      logging.warning('--emulator-count capped at %d.', _MAX_ANDROID_EMULATORS)
+    self._emulator_count = min(_MAX_ANDROID_EMULATORS, args.emulator_count)
+    self._emulator_window = args.emulator_window
+    self._writable_system = ((hasattr(args, 'use_webview_provider')
+                              and args.use_webview_provider)
+                             or (hasattr(args, 'replace_system_package')
+                                 and args.replace_system_package)
+                             or (hasattr(args, 'system_packages_to_remove')
+                                 and args.system_packages_to_remove))
+
+    self._emulator_instances = []
+    self._device_serials = []
+
+  #override
+  def SetUp(self):
+    self._avd_config.Install()
+
+    emulator_instances = [
+        self._avd_config.CreateInstance() for _ in range(self._emulator_count)
+    ]
+
+    def start_emulator_instance(e):
+
+      def impl(e):
+        try:
+          e.Start(
+              window=self._emulator_window,
+              writable_system=self._writable_system)
+        except avd.AvdException:
+          logging.exception('Failed to start emulator instance.')
+          return None
+        try:
+          device_utils.DeviceUtils(e.serial).WaitUntilFullyBooted()
+        except base_error.BaseError:
+          e.Stop()
+          raise
+        return e
+
+      def retry_on_timeout(exc):
+        return isinstance(exc, (device_errors.CommandTimeoutError,
+                                reraiser_thread.TimeoutError))
+
+      return timeout_retry.Run(
+          impl,
+          timeout=120 if self._writable_system else 30,
+          retries=2,
+          args=[e],
+          retry_if_func=retry_on_timeout)
+
+    parallel_emulators = parallelizer.SyncParallelizer(emulator_instances)
+    self._emulator_instances = [
+        emu
+        for emu in parallel_emulators.pMap(start_emulator_instance).pGet(None)
+        if emu is not None
+    ]
+    self._device_serials = [e.serial for e in self._emulator_instances]
+
+    if not self._emulator_instances:
+      raise Exception('Failed to start any instances of the emulator.')
+    elif len(self._emulator_instances) < self._emulator_count:
+      logging.warning(
+          'Running with fewer emulator instances than requested (%d vs %d)',
+          len(self._emulator_instances), self._emulator_count)
+
+    super(LocalEmulatorEnvironment, self).SetUp()
+
+  #override
+  def TearDown(self):
+    try:
+      super(LocalEmulatorEnvironment, self).TearDown()
+    finally:
+      parallelizer.SyncParallelizer(self._emulator_instances).Stop()
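+
+
+# A minimal lifecycle sketch (illustrative; `args` stands in for parsed
+# test-runner arguments):
+#
+#   env = LocalEmulatorEnvironment(args, output_manager, parser.error)
+#   env.SetUp()       # Installs the AVD and boots up to args.emulator_count
+#                     # emulator instances in parallel.
+#   try:
+#     ...             # Run tests against the booted emulators.
+#   finally:
+#     env.TearDown()  # Stops every emulator instance that was started.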
diff --git a/src/build/android/pylib/local/emulator/proto/__init__.py b/src/build/android/pylib/local/emulator/proto/__init__.py
new file mode 100644
index 0000000..4a12e35
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/proto/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/emulator/proto/avd.proto b/src/build/android/pylib/local/emulator/proto/avd.proto
new file mode 100644
index 0000000..b06da49
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/proto/avd.proto
@@ -0,0 +1,75 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto3";
+
+package tools.android.avd.proto;
+
+message CIPDPackage {
+  // CIPD package name.
+  string package_name = 1;
+  // CIPD package version to use.
+  // Ignored when creating AVD packages.
+  string version = 2;
+  // Path into which the package should be installed.
+  // src-relative.
+  string dest_path = 3;
+}
+
+message ScreenSettings {
+  // Screen height in pixels.
+  uint32 height = 1;
+
+  // Screen width in pixels.
+  uint32 width = 2;
+
+  // Screen density in dpi.
+  uint32 density = 3;
+}
+
+message SdcardSettings {
+  // Size of the sdcard that should be created for this AVD.
+  // Can be anything that `mksdcard` or `avdmanager -c` would accept:
+  //   - a number of bytes
+  //   - a number followed by K, M, or G, indicating that many
+  //     KiB, MiB, or GiB, respectively.
+  string size = 1;
+}
+
+message AvdSettings {
+  // Settings pertaining to the AVD's screen.
+  ScreenSettings screen = 1;
+
+  // Settings pertaining to the AVD's sdcard.
+  SdcardSettings sdcard = 2;
+
+  // Advanced Features for AVD. The <key,value> pairs here will override the
+  // default ones in the given system image.
+  // See https://bit.ly/2P1qK2X for all the available keys.
+  // The values should be on, off, default, or null.
+  map<string, string> advanced_features = 3;
+
+  // The physical RAM size on the device, in megabytes.
+  uint32 ram_size = 4;
+}
+
+message Avd {
+  // The emulator to use in running the AVD.
+  CIPDPackage emulator_package = 1;
+
+  // The system image to use.
+  CIPDPackage system_image_package = 2;
+  // The name of the system image to use, as reported by sdkmanager.
+  string system_image_name = 3;
+
+  // The AVD to create or use.
+  // (Only the package_name is used during AVD creation.)
+  CIPDPackage avd_package = 4;
+  // The name of the AVD to create or use.
+  string avd_name = 5;
+
+  // How to configure the AVD at creation.
+  AvdSettings avd_settings = 6;
+}
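+
+// A minimal Avd message in text format (all values illustrative):
+//
+//   emulator_package {
+//     package_name: "path/to/emulator/cipd/package"
+//     dest_path: "third_party/android_emulator"
+//   }
+//   system_image_name: "system-images;android-28;google_apis;x86"
+//   avd_name: "generic_android28"
+//   avd_settings {
+//     screen { height: 1920 width: 1080 density: 480 }
+//     sdcard { size: "512M" }
+//   }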
diff --git a/src/build/android/pylib/local/emulator/proto/avd_pb2.py b/src/build/android/pylib/local/emulator/proto/avd_pb2.py
new file mode 100644
index 0000000..49cc1aa
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/proto/avd_pb2.py
@@ -0,0 +1,362 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: avd.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='avd.proto',
+  package='tools.android.avd.proto',
+  syntax='proto3',
+  serialized_options=None,
+  serialized_pb=b'\n\tavd.proto\x12\x17tools.android.avd.proto\"G\n\x0b\x43IPDPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x11\n\tdest_path\x18\x03 \x01(\t\"@\n\x0eScreenSettings\x12\x0e\n\x06height\x18\x01 \x01(\r\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0f\n\x07\x64\x65nsity\x18\x03 \x01(\r\"\x1e\n\x0eSdcardSettings\x12\x0c\n\x04size\x18\x01 \x01(\t\"\xa1\x02\n\x0b\x41vdSettings\x12\x37\n\x06screen\x18\x01 \x01(\x0b\x32\'.tools.android.avd.proto.ScreenSettings\x12\x37\n\x06sdcard\x18\x02 \x01(\x0b\x32\'.tools.android.avd.proto.SdcardSettings\x12U\n\x11\x61\x64vanced_features\x18\x03 \x03(\x0b\x32:.tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry\x12\x10\n\x08ram_size\x18\x04 \x01(\r\x1a\x37\n\x15\x41\x64vancedFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xad\x02\n\x03\x41vd\x12>\n\x10\x65mulator_package\x18\x01 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x42\n\x14system_image_package\x18\x02 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x19\n\x11system_image_name\x18\x03 \x01(\t\x12\x39\n\x0b\x61vd_package\x18\x04 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x10\n\x08\x61vd_name\x18\x05 \x01(\t\x12:\n\x0c\x61vd_settings\x18\x06 \x01(\x0b\x32$.tools.android.avd.proto.AvdSettingsb\x06proto3'
+)
+
+
+
+
+_CIPDPACKAGE = _descriptor.Descriptor(
+  name='CIPDPackage',
+  full_name='tools.android.avd.proto.CIPDPackage',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='package_name', full_name='tools.android.avd.proto.CIPDPackage.package_name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='version', full_name='tools.android.avd.proto.CIPDPackage.version', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='dest_path', full_name='tools.android.avd.proto.CIPDPackage.dest_path', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=38,
+  serialized_end=109,
+)
+
+
+_SCREENSETTINGS = _descriptor.Descriptor(
+  name='ScreenSettings',
+  full_name='tools.android.avd.proto.ScreenSettings',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='height', full_name='tools.android.avd.proto.ScreenSettings.height', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='width', full_name='tools.android.avd.proto.ScreenSettings.width', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='density', full_name='tools.android.avd.proto.ScreenSettings.density', index=2,
+      number=3, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=111,
+  serialized_end=175,
+)
+
+
+_SDCARDSETTINGS = _descriptor.Descriptor(
+  name='SdcardSettings',
+  full_name='tools.android.avd.proto.SdcardSettings',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='size', full_name='tools.android.avd.proto.SdcardSettings.size', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=177,
+  serialized_end=207,
+)
+
+
+_AVDSETTINGS_ADVANCEDFEATURESENTRY = _descriptor.Descriptor(
+  name='AdvancedFeaturesEntry',
+  full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.key', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=b'8\001',
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=444,
+  serialized_end=499,
+)
+
+_AVDSETTINGS = _descriptor.Descriptor(
+  name='AvdSettings',
+  full_name='tools.android.avd.proto.AvdSettings',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='screen', full_name='tools.android.avd.proto.AvdSettings.screen', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='sdcard', full_name='tools.android.avd.proto.AvdSettings.sdcard', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='advanced_features', full_name='tools.android.avd.proto.AvdSettings.advanced_features', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ram_size', full_name='tools.android.avd.proto.AvdSettings.ram_size', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_AVDSETTINGS_ADVANCEDFEATURESENTRY, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=210,
+  serialized_end=499,
+)
+
+
+_AVD = _descriptor.Descriptor(
+  name='Avd',
+  full_name='tools.android.avd.proto.Avd',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='emulator_package', full_name='tools.android.avd.proto.Avd.emulator_package', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='system_image_package', full_name='tools.android.avd.proto.Avd.system_image_package', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='system_image_name', full_name='tools.android.avd.proto.Avd.system_image_name', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='avd_package', full_name='tools.android.avd.proto.Avd.avd_package', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='avd_name', full_name='tools.android.avd.proto.Avd.avd_name', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='avd_settings', full_name='tools.android.avd.proto.Avd.avd_settings', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=502,
+  serialized_end=803,
+)
+
+_AVDSETTINGS_ADVANCEDFEATURESENTRY.containing_type = _AVDSETTINGS
+_AVDSETTINGS.fields_by_name['screen'].message_type = _SCREENSETTINGS
+_AVDSETTINGS.fields_by_name['sdcard'].message_type = _SDCARDSETTINGS
+_AVDSETTINGS.fields_by_name['advanced_features'].message_type = _AVDSETTINGS_ADVANCEDFEATURESENTRY
+_AVD.fields_by_name['emulator_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['system_image_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['avd_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['avd_settings'].message_type = _AVDSETTINGS
+DESCRIPTOR.message_types_by_name['CIPDPackage'] = _CIPDPACKAGE
+DESCRIPTOR.message_types_by_name['ScreenSettings'] = _SCREENSETTINGS
+DESCRIPTOR.message_types_by_name['SdcardSettings'] = _SDCARDSETTINGS
+DESCRIPTOR.message_types_by_name['AvdSettings'] = _AVDSETTINGS
+DESCRIPTOR.message_types_by_name['Avd'] = _AVD
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+CIPDPackage = _reflection.GeneratedProtocolMessageType('CIPDPackage', (_message.Message,), {
+  'DESCRIPTOR' : _CIPDPACKAGE,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.CIPDPackage)
+  })
+_sym_db.RegisterMessage(CIPDPackage)
+
+ScreenSettings = _reflection.GeneratedProtocolMessageType('ScreenSettings', (_message.Message,), {
+  'DESCRIPTOR' : _SCREENSETTINGS,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.ScreenSettings)
+  })
+_sym_db.RegisterMessage(ScreenSettings)
+
+SdcardSettings = _reflection.GeneratedProtocolMessageType('SdcardSettings', (_message.Message,), {
+  'DESCRIPTOR' : _SDCARDSETTINGS,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.SdcardSettings)
+  })
+_sym_db.RegisterMessage(SdcardSettings)
+
+AvdSettings = _reflection.GeneratedProtocolMessageType('AvdSettings', (_message.Message,), {
+
+  'AdvancedFeaturesEntry' : _reflection.GeneratedProtocolMessageType('AdvancedFeaturesEntry', (_message.Message,), {
+    'DESCRIPTOR' : _AVDSETTINGS_ADVANCEDFEATURESENTRY,
+    '__module__' : 'avd_pb2'
+    # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry)
+    })
+  ,
+  'DESCRIPTOR' : _AVDSETTINGS,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings)
+  })
+_sym_db.RegisterMessage(AvdSettings)
+_sym_db.RegisterMessage(AvdSettings.AdvancedFeaturesEntry)
+
+Avd = _reflection.GeneratedProtocolMessageType('Avd', (_message.Message,), {
+  'DESCRIPTOR' : _AVD,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.Avd)
+  })
+_sym_db.RegisterMessage(Avd)
+
+
+_AVDSETTINGS_ADVANCEDFEATURESENTRY._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/src/build/android/pylib/local/local_test_server_spawner.py b/src/build/android/pylib/local/local_test_server_spawner.py
new file mode 100644
index 0000000..f21f1be
--- /dev/null
+++ b/src/build/android/pylib/local/local_test_server_spawner.py
@@ -0,0 +1,101 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import json
+import time
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil.android import forwarder
+from devil.android import ports
+from pylib.base import test_server
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import chrome_test_server_spawner
+
+
+# The tests should not need more than one test server instance.
+MAX_TEST_SERVER_INSTANCES = 1
+
+
+def _WaitUntil(predicate, max_attempts=5):
+  """Blocks until the provided predicate (function) is true.
+
+  Returns:
+    Whether the provided predicate was satisfied once (before the timeout).
+  """
+  sleep_time_sec = 0.025
+  for _ in range(1, max_attempts):
+    if predicate():
+      return True
+    time.sleep(sleep_time_sec)
+    sleep_time_sec = min(1, sleep_time_sec * 2)  # Don't wait more than 1 sec.
+  return False
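+
+# For example (illustrative): _WaitUntil(lambda: ports.IsHostPortAvailable(p))
+# polls the predicate up to four times, sleeping 25/50/100/200 ms between
+# attempts, and returns False if it never became true.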
+
+
+class PortForwarderAndroid(chrome_test_server_spawner.PortForwarder):
+  def __init__(self, device, tool):
+    self.device = device
+    self.tool = tool
+
+  def Map(self, port_pairs):
+    forwarder.Forwarder.Map(port_pairs, self.device, self.tool)
+
+  def GetDevicePortForHostPort(self, host_port):
+    return forwarder.Forwarder.DevicePortForHostPort(host_port)
+
+  def WaitHostPortAvailable(self, port):
+    return _WaitUntil(lambda: ports.IsHostPortAvailable(port))
+
+  def WaitPortNotAvailable(self, port):
+    return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))
+
+  def WaitDevicePortReady(self, port):
+    return _WaitUntil(lambda: ports.IsDevicePortUsed(self.device, port))
+
+  def Unmap(self, device_port):
+    forwarder.Forwarder.UnmapDevicePort(device_port, self.device)
+
+
+class LocalTestServerSpawner(test_server.TestServer):
+
+  def __init__(self, port, device, tool):
+    super(LocalTestServerSpawner, self).__init__()
+    self._device = device
+    self._spawning_server = chrome_test_server_spawner.SpawningServer(
+        port, PortForwarderAndroid(device, tool), MAX_TEST_SERVER_INSTANCES)
+    self._tool = tool
+
+  @property
+  def server_address(self):
+    return self._spawning_server.server.server_address
+
+  @property
+  def port(self):
+    return self.server_address[1]
+
+  #override
+  def SetUp(self):
+    # See net/test/spawned_test_server/remote_test_server.h for description of
+    # the fields in the config file.
+    test_server_config = json.dumps({
+      'spawner_url_base': 'http://localhost:%d' % self.port
+    })
+    self._device.WriteFile(
+        '%s/net-test-server-config' % self._device.GetExternalStoragePath(),
+        test_server_config)
+    forwarder.Forwarder.Map(
+        [(self.port, self.port)], self._device, self._tool)
+    self._spawning_server.Start()
+
+  #override
+  def Reset(self):
+    self._spawning_server.CleanupState()
+
+  #override
+  def TearDown(self):
+    self.Reset()
+    self._spawning_server.Stop()
+    forwarder.Forwarder.UnmapDevicePort(self.port, self._device)
diff --git a/src/build/android/pylib/local/machine/__init__.py b/src/build/android/pylib/local/machine/__init__.py
new file mode 100644
index 0000000..ca3e206
--- /dev/null
+++ b/src/build/android/pylib/local/machine/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/machine/local_machine_environment.py b/src/build/android/pylib/local/machine/local_machine_environment.py
new file mode 100644
index 0000000..d198f89
--- /dev/null
+++ b/src/build/android/pylib/local/machine/local_machine_environment.py
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import devil_chromium
+from pylib import constants
+from pylib.base import environment
+
+
+class LocalMachineEnvironment(environment.Environment):
+
+  def __init__(self, _args, output_manager, _error_func):
+    super(LocalMachineEnvironment, self).__init__(output_manager)
+
+    devil_chromium.Initialize(
+        output_directory=constants.GetOutDirectory())
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def TearDown(self):
+    pass
diff --git a/src/build/android/pylib/local/machine/local_machine_junit_test_run.py b/src/build/android/pylib/local/machine/local_machine_junit_test_run.py
new file mode 100644
index 0000000..a64b63b
--- /dev/null
+++ b/src/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -0,0 +1,309 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import collections
+import json
+import logging
+import multiprocessing
+import os
+import select
+import subprocess
+import sys
+import zipfile
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.constants import host_paths
+from pylib.results import json_results
+from py_utils import tempfile_ext
+
+
+# Classes under these prefixes are part of the test-running machinery itself
+# and are excluded from test discovery. See:
+# https://android.googlesource.com/platform/frameworks/testing/+/android-support-test/runner/src/main/java/android/support/test/internal/runner/TestRequestBuilder.java
+# base/test/android/javatests/src/org/chromium/base/test/BaseChromiumAndroidJUnitRunner.java # pylint: disable=line-too-long
+_EXCLUDED_CLASSES_PREFIXES = ('android', 'junit', 'org/bouncycastle/util',
+                              'org/hamcrest', 'org/junit', 'org/mockito')
+
+# Suites we shouldn't shard, usually because they don't contain enough test
+# cases.
+_EXCLUDED_SUITES = {
+    'password_check_junit_tests',
+    'touch_to_fill_junit_tests',
+}
+
+
+# It can actually take longer to run if you shard too much, especially on
+# smaller suites. Locally media_base_junit_tests takes 4.3 sec with 1 shard,
+# and 6 sec with 2 or more shards.
+_MIN_CLASSES_PER_SHARD = 8
+
+
+class LocalMachineJunitTestRun(test_run.TestRun):
+  def __init__(self, env, test_instance):
+    super(LocalMachineJunitTestRun, self).__init__(env, test_instance)
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    pass
+
+  def _CreateJarArgsList(self, json_result_file_paths, group_test_list, shards):
+    # Creates a list of jar_args. Each jar_args entry gets its own
+    # json_results file to write test results to, and its own set of tests
+    # to run, specified via -gtest-filter.
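+    # For two shards the result might look like (paths and filters
+    # illustrative):
+    #   [['-json-results-file', '/tmp/results0.json',
+    #     '-gtest-filter', 'org.foo.ATest*:org.foo.CTest*'],
+    #    ['-json-results-file', '/tmp/results1.json',
+    #     '-gtest-filter', 'org.foo.BTest*']]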
+    jar_args_list = [['-json-results-file', result_file]
+                     for result_file in json_result_file_paths]
+    for index, jar_arg in enumerate(jar_args_list):
+      if shards > 1:
+        jar_arg.extend(['-gtest-filter', ':'.join(group_test_list[index])])
+      elif self._test_instance.test_filter:
+        jar_arg.extend(['-gtest-filter', self._test_instance.test_filter])
+
+      if self._test_instance.package_filter:
+        jar_arg.extend(['-package-filter', self._test_instance.package_filter])
+      if self._test_instance.runner_filter:
+        jar_arg.extend(['-runner-filter', self._test_instance.runner_filter])
+
+    return jar_args_list
+
+  def _CreateJvmArgsList(self):
+    # Creates a list of jvm_args (robolectric, code coverage, etc...)
+    jvm_args = [
+        '-Drobolectric.dependency.dir=%s' %
+        self._test_instance.robolectric_runtime_deps_dir,
+        '-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,
+        '-Drobolectric.resourcesMode=binary',
+    ]
+    if logging.getLogger().isEnabledFor(logging.INFO):
+      jvm_args += ['-Drobolectric.logging=stdout']
+    if self._test_instance.debug_socket:
+      jvm_args += [
+          '-agentlib:jdwp=transport=dt_socket'
+          ',server=y,suspend=y,address=%s' % self._test_instance.debug_socket
+      ]
+
+    if self._test_instance.coverage_dir:
+      if not os.path.exists(self._test_instance.coverage_dir):
+        os.makedirs(self._test_instance.coverage_dir)
+      elif not os.path.isdir(self._test_instance.coverage_dir):
+        raise Exception('--coverage-dir takes a directory, not file path.')
+      if self._test_instance.coverage_on_the_fly:
+        jacoco_coverage_file = os.path.join(
+            self._test_instance.coverage_dir,
+            '%s.exec' % self._test_instance.suite)
+        jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                         'third_party', 'jacoco', 'lib',
+                                         'jacocoagent.jar')
+
+        # inclnolocationclasses is set to false to prevent a
+        # NoClassDefFoundError.
+        jacoco_args = '-javaagent:{}=destfile={},inclnolocationclasses=false'
+        jvm_args.append(
+            jacoco_args.format(jacoco_agent_path, jacoco_coverage_file))
+      else:
+        jvm_args.append('-Djacoco-agent.destfile=%s' %
+                        os.path.join(self._test_instance.coverage_dir,
+                                     '%s.exec' % self._test_instance.suite))
+
+    return jvm_args
+
+  #override
+  def RunTests(self, results):
+    wrapper_path = os.path.join(constants.GetOutDirectory(), 'bin', 'helper',
+                                self._test_instance.suite)
+
+    # This avoids searching through the classpath jars for test classes,
+    # which takes about 1-2 seconds.
+    # Do not shard when a test filter is present since we do not know at this
+    # point which tests will be filtered out.
+    if (self._test_instance.shards == 1 or self._test_instance.test_filter
+        or self._test_instance.suite in _EXCLUDED_SUITES):
+      test_classes = []
+      shards = 1
+    else:
+      test_classes = _GetTestClasses(wrapper_path)
+      shards = ChooseNumOfShards(test_classes, self._test_instance.shards)
+
+    logging.info('Running tests on %d shard(s).', shards)
+    group_test_list = GroupTestsForShard(shards, test_classes)
+
+    with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+      cmd_list = [[wrapper_path] for _ in range(shards)]
+      json_result_file_paths = [
+          os.path.join(temp_dir, 'results%d.json' % i) for i in range(shards)
+      ]
+      jar_args_list = self._CreateJarArgsList(json_result_file_paths,
+                                              group_test_list, shards)
+      for i in range(shards):
+        cmd_list[i].extend(['--jar-args', '"%s"' % ' '.join(jar_args_list[i])])
+
+      jvm_args = self._CreateJvmArgsList()
+      if jvm_args:
+        for cmd in cmd_list:
+          cmd.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])
+
+      AddPropertiesJar(cmd_list, temp_dir, self._test_instance.resource_apk)
+
+      procs = [
+          subprocess.Popen(cmd,
+                           stdout=subprocess.PIPE,
+                           stderr=subprocess.STDOUT) for cmd in cmd_list
+      ]
+      PrintProcessesStdout(procs)
+
+      results_list = []
+      try:
+        for json_file_path in json_result_file_paths:
+          with open(json_file_path, 'r') as f:
+            results_list += json_results.ParseResultsFromJson(
+                json.loads(f.read()))
+      except IOError:
+        # In the case of a failure in the JUnit or Robolectric test runner
+        # the output json file may never be written.
+        results_list = [
+          base_test_result.BaseTestResult(
+              'Test Runner Failure', base_test_result.ResultType.UNKNOWN)
+        ]
+
+      test_run_results = base_test_result.TestRunResults()
+      test_run_results.AddResults(results_list)
+      results.append(test_run_results)
+
+  #override
+  def TearDown(self):
+    pass
+
+
+def AddPropertiesJar(cmd_list, temp_dir, resource_apk):
+  # Create properties file for Robolectric test runners so they can find the
+  # binary resources.
+  properties_jar_path = os.path.join(temp_dir, 'properties.jar')
+  with zipfile.ZipFile(properties_jar_path, 'w') as z:
+    z.writestr('com/android/tools/test_config.properties',
+               'android_resource_apk=%s' % resource_apk)
+
+  for cmd in cmd_list:
+    cmd.extend(['--classpath', properties_jar_path])
+
+
+def ChooseNumOfShards(test_classes, shards):
+  # Don't override requests to not shard.
+  if shards == 1:
+    return 1
+
+  # Sharding doesn't reduce runtime on just a few tests.
+  if shards > (len(test_classes) // _MIN_CLASSES_PER_SHARD) or shards < 1:
+    shards = max(1, (len(test_classes) // _MIN_CLASSES_PER_SHARD))
+
+  # Local tests of explicit --shard values show that max speed is achieved
+  # at cpu_count() / 2.
+  # Using -XX:TieredStopAtLevel=1 is required for this result. The flag reduces
+  # CPU time by two-thirds, making sharding more effective.
+  shards = max(1, min(shards, multiprocessing.cpu_count() // 2))
+  # Ensure at least one test class per shard.
+  shards = min(len(test_classes), shards)
+
+  return shards
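+
+# For example (cf. the unit tests): with 36 CPUs, 50 test classes, and
+# --shards 4, this returns 4; requesting 8 shards for only 20 classes is
+# reduced to 20 // _MIN_CLASSES_PER_SHARD == 2.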
+
+
+def GroupTestsForShard(num_of_shards, test_classes):
+  """Groups tests that will be ran on each shard.
+
+  Args:
+    num_of_shards: number of shards to split tests between.
+    test_classes: A list of test_class files in the jar.
+
+  Returns:
+    A dict mapping shard index to a list of test class filters.
+  """
+  test_dict = {i: [] for i in range(num_of_shards)}
+
+  # Round-robin test distribution to reduce the chance that a sequential
+  # group of classes all have an unusually high number of tests.
+  for count, test_cls in enumerate(test_classes):
+    test_cls = test_cls.replace('.class', '*')
+    test_cls = test_cls.replace('/', '.')
+    test_dict[count % num_of_shards].append(test_cls)
+
+  return test_dict
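+
+# For example, GroupTestsForShard(2, ['a/FooTest.class', 'a/BarTest.class',
+# 'a/BazTest.class']) returns
+# {0: ['a.FooTest*', 'a.BazTest*'], 1: ['a.BarTest*']}.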
+
+
+def PrintProcessesStdout(procs):
+  """Prints the stdout of all the processes.
+
+  Buffers the stdout of the processes and prints it when finished.
+
+  Args:
+    procs: A list of subprocesses.
+  """
+  streams = [p.stdout for p in procs]
+  outputs = collections.defaultdict(list)
+  first_fd = streams[0].fileno()
+
+  while streams:
+    rstreams, _, _ = select.select(streams, [], [])
+    for stream in rstreams:
+      line = stream.readline()
+      if line:
+        # Print the first process's output live so the user can see progress
+        # rather than waiting for everything at the end.
+        if stream.fileno() == first_fd:
+          sys.stdout.write(line)
+        else:
+          outputs[stream.fileno()].append(line)
+      else:
+        streams.remove(stream)  # End of stream.
+
+  for p in procs:
+    sys.stdout.write(''.join(outputs[p.stdout.fileno()]))
+
+
+def _GetTestClasses(file_path):
+  test_jar_paths = subprocess.check_output([file_path, '--print-classpath'])
+  test_jar_paths = test_jar_paths.split(':')
+
+  test_classes = []
+  for test_jar_path in test_jar_paths:
+    # Avoid searching through jars that are for the test runner.
+    # TODO(crbug.com/1144077): Use robolectric buildconfig file arg.
+    if 'third_party/robolectric/' in test_jar_path:
+      continue
+
+    test_classes += _GetTestClassesFromJar(test_jar_path)
+
+  logging.info('Found %d test classes in class_path jars.', len(test_classes))
+  return test_classes
+
+
+def _GetTestClassesFromJar(test_jar_path):
+  """Returns a list of test classes from a jar.
+
+  Test files end in Test; this is enforced by:
+  //tools/android/errorprone_plugin/src/org/chromium/tools/errorprone
+  /plugin/TestClassNameCheck.java
+
+  Args:
+    test_jar_path: Path to the jar.
+
+  Returns:
+    A list of test classes that were in the jar.
+  """
+  class_list = []
+  with zipfile.ZipFile(test_jar_path, 'r') as zip_f:
+    for test_class in zip_f.namelist():
+      if test_class.startswith(_EXCLUDED_CLASSES_PREFIXES):
+        continue
+      if test_class.endswith('Test.class') and '$' not in test_class:
+        class_list.append(test_class)
+
+  return class_list
diff --git a/src/build/android/pylib/local/machine/local_machine_junit_test_run_test.py b/src/build/android/pylib/local/machine/local_machine_junit_test_run_test.py
new file mode 100755
index 0000000..2bbe561
--- /dev/null
+++ b/src/build/android/pylib/local/machine/local_machine_junit_test_run_test.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import os
+import unittest
+
+from pylib.local.machine import local_machine_junit_test_run
+from py_utils import tempfile_ext
+from mock import patch  # pylint: disable=import-error
+
+
+class LocalMachineJunitTestRunTests(unittest.TestCase):
+  def testAddPropertiesJar(self):
+    with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+      apk = 'resource_apk'
+      cmd_list = []
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEquals(cmd_list, [])
+      cmd_list = [['test1']]
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEquals(
+          cmd_list[0],
+          ['test1', '--classpath',
+           os.path.join(temp_dir, 'properties.jar')])
+      cmd_list = [['test1'], ['test2']]
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEquals(len(cmd_list[0]), 3)
+      self.assertEquals(
+          cmd_list[1],
+          ['test2', '--classpath',
+           os.path.join(temp_dir, 'properties.jar')])
+
+  @patch('multiprocessing.cpu_count')
+  def testChooseNumOfShards(self, mock_cpu_count):
+    mock_cpu_count.return_value = 36
+    # Test shards is 1 when filter is set.
+    test_shards = 1
+    test_classes = [1] * 50
+    shards = local_machine_junit_test_run.ChooseNumOfShards(
+        test_classes, test_shards)
+    self.assertEquals(1, shards)
+
+    # Tests setting shards.
+    test_shards = 4
+    shards = local_machine_junit_test_run.ChooseNumOfShards(
+        test_classes, test_shards)
+    self.assertEquals(4, shards)
+
+    # Tests using min_class per shards.
+    test_classes = [1] * 20
+    test_shards = 8
+    shards = local_machine_junit_test_run.ChooseNumOfShards(
+        test_classes, test_shards)
+    self.assertEquals(2, shards)
+
+  def testGroupTestsForShard(self):
+    test_classes = []
+    results = local_machine_junit_test_run.GroupTestsForShard(1, test_classes)
+    self.assertDictEqual(results, {0: []})
+
+    test_classes = ['dir/test.class'] * 5
+    results = local_machine_junit_test_run.GroupTestsForShard(1, test_classes)
+    self.assertDictEqual(results, {0: ['dir.test*'] * 5})
+
+    test_classes = ['dir/test.class'] * 5
+    results = local_machine_junit_test_run.GroupTestsForShard(2, test_classes)
+    ans_dict = {
+        0: ['dir.test*'] * 3,
+        1: ['dir.test*'] * 2,
+    }
+    self.assertDictEqual(results, ans_dict)
+
+    test_classes = ['a10 warthog', 'b17', 'SR71']
+    results = local_machine_junit_test_run.GroupTestsForShard(3, test_classes)
+    ans_dict = {
+        0: ['a10 warthog'],
+        1: ['b17'],
+        2: ['SR71'],
+    }
+    self.assertDictEqual(results, ans_dict)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/monkey/__init__.py b/src/build/android/pylib/monkey/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/monkey/__init__.py
diff --git a/src/build/android/pylib/monkey/monkey_test_instance.py b/src/build/android/pylib/monkey/monkey_test_instance.py
new file mode 100644
index 0000000..10b1131
--- /dev/null
+++ b/src/build/android/pylib/monkey/monkey_test_instance.py
@@ -0,0 +1,72 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import random
+
+from pylib import constants
+from pylib.base import test_instance
+
+
+_SINGLE_EVENT_TIMEOUT = 100  # Milliseconds.
+
+
+class MonkeyTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, _):
+    super(MonkeyTestInstance, self).__init__()
+
+    self._categories = args.categories
+    self._event_count = args.event_count
+    self._seed = args.seed or random.randint(1, 100)
+    self._throttle = args.throttle
+    self._verbose_count = args.verbose_count
+
+    self._package = constants.PACKAGE_INFO[args.browser].package
+    self._activity = constants.PACKAGE_INFO[args.browser].activity
+
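+    # Each event takes at most throttle + _SINGLE_EVENT_TIMEOUT milliseconds,
+    # so e.g. 10000 events with --throttle 100 yields a 2000 second timeout.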
+    self._timeout_s = (
+        self.event_count * (self.throttle + _SINGLE_EVENT_TIMEOUT)) / 1000
+
+  #override
+  def TestType(self):
+    return 'monkey'
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def TearDown(self):
+    pass
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def categories(self):
+    return self._categories
+
+  @property
+  def event_count(self):
+    return self._event_count
+
+  @property
+  def package(self):
+    return self._package
+
+  @property
+  def seed(self):
+    return self._seed
+
+  @property
+  def throttle(self):
+    return self._throttle
+
+  @property
+  def timeout(self):
+    return self._timeout_s
+
+  @property
+  def verbose_count(self):
+    return self._verbose_count
diff --git a/src/build/android/pylib/output/__init__.py b/src/build/android/pylib/output/__init__.py
new file mode 100644
index 0000000..a22a6ee
--- /dev/null
+++ b/src/build/android/pylib/output/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/output/local_output_manager.py b/src/build/android/pylib/output/local_output_manager.py
new file mode 100644
index 0000000..89becd7
--- /dev/null
+++ b/src/build/android/pylib/output/local_output_manager.py
@@ -0,0 +1,45 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import time
+import os
+import shutil
+import urllib
+
+from pylib.base import output_manager
+
+
+class LocalOutputManager(output_manager.OutputManager):
+  """Saves and manages test output files locally in output directory.
+
+  Location files will be saved in {output_dir}/TEST_RESULTS_{timestamp}.
+  """
+
+  def __init__(self, output_dir):
+    super(LocalOutputManager, self).__init__()
+    timestamp = time.strftime(
+        '%Y_%m_%dT%H_%M_%S', time.localtime())
+    self._output_root = os.path.abspath(os.path.join(
+        output_dir, 'TEST_RESULTS_%s' % timestamp))
+
+  #override
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    return LocalArchivedFile(
+        out_filename, out_subdir, datatype, self._output_root)
+
+
+class LocalArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self, out_filename, out_subdir, datatype, out_root):
+    super(LocalArchivedFile, self).__init__(
+        out_filename, out_subdir, datatype)
+    self._output_path = os.path.join(out_root, out_subdir, out_filename)
+
+  def _Link(self):
+    return 'file://%s' % urllib.quote(self._output_path)
+
+  def _Archive(self):
+    if not os.path.exists(os.path.dirname(self._output_path)):
+      os.makedirs(os.path.dirname(self._output_path))
+    shutil.copy(self.name, self._output_path)
diff --git a/src/build/android/pylib/output/local_output_manager_test.py b/src/build/android/pylib/output/local_output_manager_test.py
new file mode 100755
index 0000000..7954350
--- /dev/null
+++ b/src/build/android/pylib/output/local_output_manager_test.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import tempfile
+import shutil
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import local_output_manager
+
+
+class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+  def setUp(self):
+    self._output_dir = tempfile.mkdtemp()
+    self._output_manager = local_output_manager.LocalOutputManager(
+        self._output_dir)
+
+  def testUsableTempFile(self):
+    self.assertUsableTempFile(
+        self._output_manager._CreateArchivedFile(
+            'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+  def tearDown(self):
+    shutil.rmtree(self._output_dir)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/output/noop_output_manager.py b/src/build/android/pylib/output/noop_output_manager.py
new file mode 100644
index 0000000..d29a743
--- /dev/null
+++ b/src/build/android/pylib/output/noop_output_manager.py
@@ -0,0 +1,42 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import output_manager
+
+# TODO(jbudorick): This class is currently mostly unused.
+# Add a --bot-mode argument that all bots pass. If --bot-mode and
+# --local-output args are both not passed to test runner then use this
+# as the output manager impl.
+
+# pylint: disable=no-self-use
+
+class NoopOutputManager(output_manager.OutputManager):
+
+  def __init__(self):
+    super(NoopOutputManager, self).__init__()
+
+  #override
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    del out_filename, out_subdir, datatype
+    return NoopArchivedFile()
+
+
+class NoopArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self):
+    super(NoopArchivedFile, self).__init__(None, None, None)
+
+  def Link(self):
+    """NoopArchivedFiles are not retained."""
+    return ''
+
+  def _Link(self):
+    pass
+
+  def Archive(self):
+    """NoopArchivedFiles are not retained."""
+    pass
+
+  def _Archive(self):
+    pass
diff --git a/src/build/android/pylib/output/noop_output_manager_test.py b/src/build/android/pylib/output/noop_output_manager_test.py
new file mode 100755
index 0000000..4e470ef
--- /dev/null
+++ b/src/build/android/pylib/output/noop_output_manager_test.py
@@ -0,0 +1,27 @@
+#! /usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import noop_output_manager
+
+
+class NoopOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+  def setUp(self):
+    self._output_manager = noop_output_manager.NoopOutputManager()
+
+  def testUsableTempFile(self):
+    self.assertUsableTempFile(
+        self._output_manager._CreateArchivedFile(
+            'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/output/remote_output_manager.py b/src/build/android/pylib/output/remote_output_manager.py
new file mode 100644
index 0000000..9fdb4bf
--- /dev/null
+++ b/src/build/android/pylib/output/remote_output_manager.py
@@ -0,0 +1,89 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import os
+
+from pylib.base import output_manager
+from pylib.output import noop_output_manager
+from pylib.utils import logdog_helper
+from pylib.utils import google_storage_helper
+
+
+class RemoteOutputManager(output_manager.OutputManager):
+
+  def __init__(self, bucket):
+    """Uploads output files to Google Storage or LogDog.
+
+    Files are uploaded either to Google Storage or to LogDog, depending on
+    the datatype.
+
+    Args:
+      bucket: Bucket to use when saving to Google Storage.
+    """
+    super(RemoteOutputManager, self).__init__()
+    self._bucket = bucket
+
+  #override
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    if datatype == output_manager.Datatype.TEXT:
+      try:
+        logdog_helper.get_logdog_client()
+        return LogdogArchivedFile(out_filename, out_subdir, datatype)
+      except RuntimeError:
+        return noop_output_manager.NoopArchivedFile()
+    else:
+      if self._bucket is None:
+        return noop_output_manager.NoopArchivedFile()
+      return GoogleStorageArchivedFile(
+          out_filename, out_subdir, datatype, self._bucket)
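+
+  # Routing summary (illustrative): TEXT files go to LogDog when a LogDog
+  # client is configured; everything else goes to Google Storage when a
+  # bucket was provided; otherwise the file is silently dropped via
+  # NoopArchivedFile.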
+
+
+class LogdogArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self, out_filename, out_subdir, datatype):
+    super(LogdogArchivedFile, self).__init__(out_filename, out_subdir, datatype)
+    self._stream_name = '%s_%s' % (out_subdir, out_filename)
+
+  def _Link(self):
+    return logdog_helper.get_viewer_url(self._stream_name)
+
+  def _Archive(self):
+    with open(self.name, 'r') as f:
+      logdog_helper.text(self._stream_name, f.read())
+
+
+class GoogleStorageArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self, out_filename, out_subdir, datatype, bucket):
+    super(GoogleStorageArchivedFile, self).__init__(
+        out_filename, out_subdir, datatype)
+    self._bucket = bucket
+    self._upload_path = None
+    self._content_addressed = None
+
+  def _PrepareArchive(self):
+    self._content_addressed = (self._datatype in (
+        output_manager.Datatype.HTML,
+        output_manager.Datatype.PNG,
+        output_manager.Datatype.JSON))
+    if self._content_addressed:
+      sha1 = hashlib.sha1()
+      with open(self.name, 'rb') as f:
+        sha1.update(f.read())
+      self._upload_path = sha1.hexdigest()
+    else:
+      self._upload_path = os.path.join(self._out_subdir, self._out_filename)
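+    # Example (illustrative): an HTML file whose contents hash to
+    # 'da39a3...' uploads under that digest, so identical files are
+    # deduplicated (see the exists() check in _Archive below); files of
+    # other datatypes upload under '<subdir>/<filename>' instead.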
+
+  def _Link(self):
+    return google_storage_helper.get_url_link(
+        self._upload_path, self._bucket)
+
+  def _Archive(self):
+    if (self._content_addressed and
+        google_storage_helper.exists(self._upload_path, self._bucket)):
+      return
+
+    google_storage_helper.upload(
+        self._upload_path, self.name, self._bucket, content_type=self._datatype)
diff --git a/src/build/android/pylib/output/remote_output_manager_test.py b/src/build/android/pylib/output/remote_output_manager_test.py
new file mode 100755
index 0000000..4c6c081
--- /dev/null
+++ b/src/build/android/pylib/output/remote_output_manager_test.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import remote_output_manager
+
+import mock  # pylint: disable=import-error
+
+
+@mock.patch('pylib.utils.google_storage_helper')
+class RemoteOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+  def setUp(self):
+    self._output_manager = remote_output_manager.RemoteOutputManager(
+        'this-is-a-fake-bucket')
+
+  def testUsableTempFile(self, google_storage_helper_mock):
+    del google_storage_helper_mock
+    self.assertUsableTempFile(
+        self._output_manager._CreateArchivedFile(
+            'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/pexpect.py b/src/build/android/pylib/pexpect.py
new file mode 100644
index 0000000..cf59fb0
--- /dev/null
+++ b/src/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+  sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+  from pexpect import * # pylint: disable=W0401,W0614
+except ImportError:
+  pass
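+
+# Usage sketch (illustrative): callers import this module instead of pexpect
+# directly so the third_party path is set up first, e.g.
+#   from pylib import pexpect
+#   child = pexpect.spawn('adb logcat')
+# If pexpect is absent, the failure surfaces only at the point of use.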
diff --git a/src/build/android/pylib/restart_adbd.sh b/src/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000..393b2eb
--- /dev/null
+++ b/src/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+restart() {
+  stop adbd
+  start adbd
+}
+
+restart &
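+
+# Usage sketch (illustrative; the device path is an assumption):
+#   adb push restart_adbd.sh /data/local/tmp/restart_adbd.sh
+#   adb shell sh /data/local/tmp/restart_adbd.sh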
diff --git a/src/build/android/pylib/results/__init__.py b/src/build/android/pylib/results/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/results/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/results/flakiness_dashboard/__init__.py b/src/build/android/pylib/results/flakiness_dashboard/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
new file mode 100644
index 0000000..b2e542b
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
@@ -0,0 +1,699 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# tools/blinkpy/web_tests/layout_package/json_results_generator.py
+# tools/blinkpy/common/net/file_uploader.py
+#
+
+import json
+import logging
+import mimetypes
+import os
+import time
+import urllib2
+
+_log = logging.getLogger(__name__)
+
+_JSON_PREFIX = 'ADD_RESULTS('
+_JSON_SUFFIX = ');'
+
+
+def HasJSONWrapper(string):
+  return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
+
+
+def StripJSONWrapper(json_content):
+  # FIXME: Kill this code once the server returns json instead of jsonp.
+  if HasJSONWrapper(json_content):
+    return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
+  return json_content
+
+
+def WriteJSON(json_object, file_path, callback=None):
+  # Specify separators in order to get compact encoding.
+  json_string = json.dumps(json_object, separators=(',', ':'))
+  if callback:
+    json_string = callback + '(' + json_string + ');'
+  with open(file_path, 'w') as fp:
+    fp.write(json_string)
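+
+# Example (illustrative): WriteJSON({'a': 1}, 'out.json',
+# callback='ADD_RESULTS') writes the jsonp payload ADD_RESULTS({"a":1});
+# which StripJSONWrapper above can unwrap again.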
+
+
+def ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flattens the trie of paths, prepending a prefix to each."""
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if data and 'results' not in data:
+      result.update(ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
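+
+# Example (illustrative):
+#   ConvertTrieToFlatPaths({'foo': {'bar.html': {'results': ['P']}}})
+#   == {'foo/bar.html': {'results': ['P']}}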
+
+
+def AddPathToTrie(path, value, trie):
+  """Inserts a single path and value into a directory trie structure."""
+  if '/' not in path:
+    trie[path] = value
+    return
+
+  directory, _, rest = path.partition('/')
+  if directory not in trie:
+    trie[directory] = {}
+  AddPathToTrie(rest, value, trie[directory])
+
+
+def TestTimingsTrie(individual_test_timings):
+  """Breaks a test name into dicts by directory
+
+  foo/bar/baz.html: 1ms
+  foo/bar/baz1.html: 3ms
+
+  becomes
+  foo: {
+      bar: {
+          baz.html: 1,
+          baz1.html: 3
+      }
+  }
+  """
+  trie = {}
+  for test_result in individual_test_timings:
+    test = test_result.test_name
+
+    AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
+
+  return trie
+
+
+class TestResult(object):
+  """A simple class that represents a single test result."""
+
+  # Test modifier constants.
+  (NONE, FAILS, FLAKY, DISABLED) = range(4)
+
+  def __init__(self, test, failed=False, elapsed_time=0):
+    self.test_name = test
+    self.failed = failed
+    self.test_run_time = elapsed_time
+
+    test_name = test
+    try:
+      test_name = test.split('.')[1]
+    except IndexError:
+      _log.warn('Invalid test name: %s.', test)
+
+    if test_name.startswith('FAILS_'):
+      self.modifier = self.FAILS
+    elif test_name.startswith('FLAKY_'):
+      self.modifier = self.FLAKY
+    elif test_name.startswith('DISABLED_'):
+      self.modifier = self.DISABLED
+    else:
+      self.modifier = self.NONE
+
+  def Fixable(self):
+    return self.failed or self.modifier == self.DISABLED
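+
+# Example (illustrative): TestResult('suite.FLAKY_testFoo').modifier is
+# TestResult.FLAKY, while TestResult('suite.testFoo', failed=True) keeps
+# modifier NONE but reports Fixable() == True.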
+
+
+class JSONResultsGeneratorBase(object):
+  """A JSON results generator for generic tests."""
+
+  MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
+  # Min time (seconds) that will be added to the JSON.
+  MIN_TIME = 1
+
+  # Note that in non-chromium tests those chars are used to indicate
+  # test modifiers (FAILS, FLAKY, etc) but not actual test results.
+  PASS_RESULT = 'P'
+  SKIP_RESULT = 'X'
+  FAIL_RESULT = 'F'
+  FLAKY_RESULT = 'L'
+  NO_DATA_RESULT = 'N'
+
+  MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
+                      TestResult.DISABLED: SKIP_RESULT,
+                      TestResult.FAILS: FAIL_RESULT,
+                      TestResult.FLAKY: FLAKY_RESULT}
+
+  VERSION = 4
+  VERSION_KEY = 'version'
+  RESULTS = 'results'
+  TIMES = 'times'
+  BUILD_NUMBERS = 'buildNumbers'
+  TIME = 'secondsSinceEpoch'
+  TESTS = 'tests'
+
+  FIXABLE_COUNT = 'fixableCount'
+  FIXABLE = 'fixableCounts'
+  ALL_FIXABLE_COUNT = 'allFixableCount'
+
+  RESULTS_FILENAME = 'results.json'
+  TIMES_MS_FILENAME = 'times_ms.json'
+  INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
+
+  # line too long pylint: disable=line-too-long
+  URL_FOR_TEST_LIST_JSON = (
+      'https://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&'
+      'master=%s')
+  # pylint: enable=line-too-long
+
+  def __init__(self, builder_name, build_name, build_number,
+               results_file_base_path, builder_base_url,
+               test_results_map, svn_repositories=None,
+               test_results_server=None,
+               test_type='',
+               master_name=''):
+    """Modifies the results.json file. Grabs it off the archive directory
+    if it is not found locally.
+
+    Args:
+      builder_name: the builder name (e.g. Webkit).
+      build_name: the build name (e.g. webkit-rel).
+      build_number: the build number.
+      results_file_base_path: Absolute path to the directory containing the
+          results json file.
+      builder_base_url: the URL where we have the archived test results.
+          If this is None no archived results will be retrieved.
+      test_results_map: A dictionary that maps test_name to TestResult.
+      svn_repositories: A (json_field_name, svn_path) pair for SVN
+          repositories that tests rely on.  The SVN revision will be
+          included in the JSON with the given json_field_name.
+      test_results_server: server that hosts test results json.
+      test_type: test type string (e.g. 'layout-tests').
+      master_name: the name of the buildbot master.
+    """
+    self._builder_name = builder_name
+    self._build_name = build_name
+    self._build_number = build_number
+    self._builder_base_url = builder_base_url
+    self._results_directory = results_file_base_path
+
+    self._test_results_map = test_results_map
+    self._test_results = test_results_map.values()
+
+    self._svn_repositories = svn_repositories
+    if not self._svn_repositories:
+      self._svn_repositories = {}
+
+    self._test_results_server = test_results_server
+    self._test_type = test_type
+    self._master_name = master_name
+
+    self._archived_results = None
+
+  def GenerateJSONOutput(self):
+    json_object = self.GetJSON()
+    if json_object:
+      file_path = (
+          os.path.join(
+              self._results_directory,
+              self.INCREMENTAL_RESULTS_FILENAME))
+      WriteJSON(json_object, file_path)
+
+  def GenerateTimesMSFile(self):
+    times = TestTimingsTrie(self._test_results_map.values())
+    file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
+    WriteJSON(times, file_path)
+
+  def GetJSON(self):
+    """Gets the results for the results.json file."""
+    results_json, error = self._GetArchivedJSONResults()
+    if error:
+      # If there was an error, don't write a results.json file at all, as
+      # that would lose all the information on the bot.
+      _log.error('Archive directory is inaccessible. Not modifying or '
+                 'clobbering the results.json file: ' + str(error))
+      return None
+
+    builder_name = self._builder_name
+    if results_json and builder_name not in results_json:
+      _log.debug('Builder name (%s) is not in the results.json file.',
+                 builder_name)
+
+    self._ConvertJSONToCurrentVersion(results_json)
+
+    if builder_name not in results_json:
+      results_json[builder_name] = (
+          self._CreateResultsForBuilderJSON())
+
+    results_for_builder = results_json[builder_name]
+
+    if builder_name:
+      self._InsertGenericMetaData(results_for_builder)
+
+    self._InsertFailureSummaries(results_for_builder)
+
+    # Update the all failing tests with result type and time.
+    tests = results_for_builder[self.TESTS]
+    all_failing_tests = self._GetFailedTestNames()
+    all_failing_tests.update(ConvertTrieToFlatPaths(tests))
+
+    for test in all_failing_tests:
+      self._InsertTestTimeAndResult(test, tests)
+
+    return results_json
+
+  def SetArchivedResults(self, archived_results):
+    self._archived_results = archived_results
+
+  def UploadJSONFiles(self, json_files):
+    """Uploads the given json_files to the test_results_server (if the
+    test_results_server is given)."""
+    if not self._test_results_server:
+      return
+
+    if not self._master_name:
+      _log.error(
+          '--test-results-server was set, but --master-name was not.  Not '
+          'uploading JSON files.')
+      return
+
+    _log.info('Uploading JSON files for builder: %s', self._builder_name)
+    attrs = [('builder', self._builder_name),
+             ('testtype', self._test_type),
+             ('master', self._master_name)]
+
+    files = [(json_file, os.path.join(self._results_directory, json_file))
+             for json_file in json_files]
+
+    url = 'https://%s/testfile/upload' % self._test_results_server
+    # Set uploading timeout in case appengine server is having problems.
+    # 120 seconds are more than enough to upload test results.
+    uploader = _FileUploader(url, 120)
+    try:
+      response = uploader.UploadAsMultipartFormData(files, attrs)
+      if response:
+        if response.code == 200:
+          _log.info('JSON uploaded.')
+        else:
+          _log.debug(
+              "JSON upload failed, %d: '%s'", response.code, response.read())
+      else:
+        _log.error('JSON upload failed; no response returned')
+    except Exception as err: # pylint: disable=broad-except
+      _log.error('Upload failed: %s', err)
+      return
+
+  def _GetTestTiming(self, test_name):
+    """Returns test timing data (elapsed time) in second
+    for the given test_name."""
+    if test_name in self._test_results_map:
+      # Floor for now to get time in seconds.
+      return int(self._test_results_map[test_name].test_run_time)
+    return 0
+
+  def _GetFailedTestNames(self):
+    """Returns a set of failed test names."""
+    return set([r.test_name for r in self._test_results if r.failed])
+
+  def _GetModifierChar(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier in self.MODIFIER_TO_CHAR.keys():
+      return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+    return self.__class__.PASS_RESULT
+
+  def _get_result_char(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier == TestResult.DISABLED:
+      return self.__class__.SKIP_RESULT
+
+    if test_result.failed:
+      return self.__class__.FAIL_RESULT
+
+    return self.__class__.PASS_RESULT
+
+  def _GetSVNRevision(self, in_directory):
+    """Returns the svn revision for the given directory.
+
+    Args:
+      in_directory: The directory where svn is to be run.
+    """
+    # This is overridden in
+    # pylib/results/flakiness_dashboard/results_uploader.py.
+    raise NotImplementedError()
+
+  def _GetArchivedJSONResults(self):
+    """Download JSON file that only contains test
+    name list from test-results server. This is for generating incremental
+    JSON so the file generated has info for tests that failed before but
+    pass or are skipped from current run.
+
+    Returns (archived_results, error) tuple where error is None if results
+    were successfully read.
+    """
+    results_json = {}
+    old_results = None
+    error = None
+
+    if not self._test_results_server:
+      return {}, None
+
+    results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+                        (urllib2.quote(self._test_results_server),
+                         urllib2.quote(self._builder_name),
+                         self.RESULTS_FILENAME,
+                         urllib2.quote(self._test_type),
+                         urllib2.quote(self._master_name)))
+
+    # pylint: disable=redefined-variable-type
+    try:
+      # FIXME: We should talk to the network via a Host object.
+      results_file = urllib2.urlopen(results_file_url)
+      old_results = results_file.read()
+    except urllib2.HTTPError as http_error:
+      # A non-4xx status code means the bot is hosed for some reason
+      # and we can't grab the results.json file off of it.
+      if http_error.code < 400 or http_error.code >= 500:
+        error = http_error
+    except urllib2.URLError as url_error:
+      error = url_error
+    # pylint: enable=redefined-variable-type
+
+    if old_results:
+      # Strip the prefix and suffix so we can get the actual JSON object.
+      old_results = StripJSONWrapper(old_results)
+
+      try:
+        results_json = json.loads(old_results)
+      except Exception: # pylint: disable=broad-except
+        _log.debug('results.json was not valid JSON. Clobbering.')
+        # The JSON file is not valid JSON. Just clobber the results.
+        results_json = {}
+    else:
+      _log.debug('Old JSON results do not exist. Starting fresh.')
+      results_json = {}
+
+    return results_json, error
+
+  def _InsertFailureSummaries(self, results_for_builder):
+    """Inserts aggregate pass/failure statistics into the JSON.
+    This method reads self._test_results and generates
+    FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+    """
+    # Insert the number of tests that failed or skipped.
+    fixable_count = len([r for r in self._test_results if r.Fixable()])
+    self._InsertItemIntoRawList(results_for_builder,
+                                fixable_count, self.FIXABLE_COUNT)
+
+    # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+    entry = {}
+    for test_name in self._test_results_map.iterkeys():
+      result_char = self._GetModifierChar(test_name)
+      entry[result_char] = entry.get(result_char, 0) + 1
+
+    # Insert the pass/skip/failure summary dictionary.
+    self._InsertItemIntoRawList(results_for_builder, entry,
+                                self.FIXABLE)
+
+    # Insert the number of all the tests that are supposed to pass.
+    all_test_count = len(self._test_results)
+    self._InsertItemIntoRawList(results_for_builder,
+                                all_test_count, self.ALL_FIXABLE_COUNT)
+
+  def _InsertItemIntoRawList(self, results_for_builder, item, key):
+    """Inserts the item into the list with the given key in the results for
+    this builder. Creates the list if no such list exists.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+      item: Number or string to insert into the list.
+      key: Key in results_for_builder for the list to insert into.
+    """
+    if key in results_for_builder:
+      raw_list = results_for_builder[key]
+    else:
+      raw_list = []
+
+    raw_list.insert(0, item)
+    raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+    results_for_builder[key] = raw_list
+
+  def _InsertItemRunLengthEncoded(self, item, encoded_results):
+    """Inserts the item into the run-length encoded results.
+
+    Args:
+      item: String or number to insert.
+      encoded_results: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    if encoded_results and item == encoded_results[0][1]:
+      num_results = encoded_results[0][0]
+      if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        encoded_results[0][0] = num_results + 1
+    else:
+      # Use a list instead of a class for the run-length encoding since
+      # we want the serialized form to be concise.
+      encoded_results.insert(0, [1, item])
+
+  def _InsertGenericMetaData(self, results_for_builder):
+    """ Inserts generic metadata (such as version number, current time etc)
+    into the JSON.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for
+          a single builder.
+    """
+    self._InsertItemIntoRawList(results_for_builder,
+                                self._build_number, self.BUILD_NUMBERS)
+
+    # Include SVN revisions for the given repositories.
+    for (name, path) in self._svn_repositories:
+      # Note: for JSON file's backward-compatibility we use 'chrome' rather
+      # than 'chromium' here.
+      lowercase_name = name.lower()
+      if lowercase_name == 'chromium':
+        lowercase_name = 'chrome'
+      self._InsertItemIntoRawList(results_for_builder,
+                                  self._GetSVNRevision(path),
+                                  lowercase_name + 'Revision')
+
+    self._InsertItemIntoRawList(results_for_builder,
+                                int(time.time()),
+                                self.TIME)
+
+  def _InsertTestTimeAndResult(self, test_name, tests):
+    """ Insert a test item with its results to the given tests dictionary.
+
+    Args:
+      tests: Dictionary containing test result entries.
+    """
+
+    result = self._get_result_char(test_name)
+    test_time = self._GetTestTiming(test_name)
+
+    this_test = tests
+    for segment in test_name.split('/'):
+      if segment not in this_test:
+        this_test[segment] = {}
+      this_test = this_test[segment]
+
+    if not this_test:
+      self._PopulateResultsAndTimesJSON(this_test)
+
+    if self.RESULTS in this_test:
+      self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
+    else:
+      this_test[self.RESULTS] = [[1, result]]
+
+    if self.TIMES in this_test:
+      self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
+    else:
+      this_test[self.TIMES] = [[1, test_time]]
+
+  def _ConvertJSONToCurrentVersion(self, results_json):
+    """If the JSON does not match the current version, converts it to the
+    current version and adds in the new version number.
+    """
+    if self.VERSION_KEY in results_json:
+      archive_version = results_json[self.VERSION_KEY]
+      if archive_version == self.VERSION:
+        return
+    else:
+      archive_version = 3
+
+    # version 3->4
+    if archive_version == 3:
+      for results in results_json.values():
+        self._ConvertTestsToTrie(results)
+
+    results_json[self.VERSION_KEY] = self.VERSION
+
+  def _ConvertTestsToTrie(self, results):
+    if not self.TESTS in results:
+      return
+
+    test_results = results[self.TESTS]
+    test_results_trie = {}
+    for test in test_results.iterkeys():
+      single_test_result = test_results[test]
+      AddPathToTrie(test, single_test_result, test_results_trie)
+
+    results[self.TESTS] = test_results_trie
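+
+  # Example (illustrative): a version-3 entry
+  #   {'tests': {'foo/bar.html': {'results': [[1, 'P']]}}}
+  # becomes the version-4 trie
+  #   {'tests': {'foo': {'bar.html': {'results': [[1, 'P']]}}}}.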
+
+  def _PopulateResultsAndTimesJSON(self, results_and_times):
+    results_and_times[self.RESULTS] = []
+    results_and_times[self.TIMES] = []
+    return results_and_times
+
+  def _CreateResultsForBuilderJSON(self):
+    results_for_builder = {}
+    results_for_builder[self.TESTS] = {}
+    return results_for_builder
+
+  def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
+    """Removes items from the run-length encoded list after the final
+    item that exceeds the max number of builds to track.
+
+    Args:
+      encoded_list: Run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    num_builds = 0
+    index = 0
+    for result in encoded_list:
+      num_builds = num_builds + result[0]
+      index = index + 1
+      if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        return encoded_list[:index]
+    return encoded_list
+
+  def _NormalizeResultsJSON(self, test, test_name, tests):
+    """ Prune tests where all runs pass or tests that no longer exist and
+    truncate all results to maxNumberOfBuilds.
+
+    Args:
+      test: ResultsAndTimes object for this test.
+      test_name: Name of the test.
+      tests: The JSON object with all the test results for this builder.
+    """
+    test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.RESULTS])
+    test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.TIMES])
+
+    is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
+                                           self.PASS_RESULT)
+    is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
+                                              self.NO_DATA_RESULT)
+    max_time = max([test_time[1] for test_time in test[self.TIMES]])
+
+    # Remove all passes/no-data from the results to reduce noise and
+    # filesize. If a test passes every run, but takes > MIN_TIME to run,
+    # don't throw away the data.
+    if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
+      del tests[test_name]
+
+  # method could be a function pylint: disable=R0201
+  def _IsResultsAllOfType(self, results, result_type):
+    """Returns whether all the results are of the given type
+    (e.g. all passes)."""
+    return len(results) == 1 and results[0][1] == result_type
+
+
+class _FileUploader(object):
+
+  def __init__(self, url, timeout_seconds):
+    self._url = url
+    self._timeout_seconds = timeout_seconds
+
+  def UploadAsMultipartFormData(self, files, attrs):
+    file_objs = []
+    for filename, path in files:
+      with open(path, 'rb') as fp:
+        file_objs.append(('file', filename, fp.read()))
+
+    # FIXME: We should use the same variable names for the formal and actual
+    # parameters.
+    content_type, data = _EncodeMultipartFormData(attrs, file_objs)
+    return self._UploadData(content_type, data)
+
+  def _UploadData(self, content_type, data):
+    start = time.time()
+    end = start + self._timeout_seconds
+    while time.time() < end:
+      try:
+        request = urllib2.Request(self._url, data,
+                                  {'Content-Type': content_type})
+        return urllib2.urlopen(request)
+      except urllib2.HTTPError as e:
+        _log.warn("Received HTTP status %s loading \"%s\".  "
+                  'Retrying in 10 seconds...', e.code, e.filename)
+        time.sleep(10)
+
+
+def _GetMIMEType(filename):
+  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# FIXME: Rather than taking tuples, this function should take more
+# structured data.
+def _EncodeMultipartFormData(fields, files):
+  """Encode form fields for multipart/form-data.
+
+  Args:
+    fields: A sequence of (name, value) elements for regular form fields.
+    files: A sequence of (name, filename, value) elements for data to be
+           uploaded as files.
+  Returns:
+    (content_type, body) ready for httplib.HTTP instance.
+
+  Source:
+    http://code.google.com/p/rietveld/source/browse/trunk/upload.py
+  """
+  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+  CRLF = '\r\n'
+  lines = []
+
+  for key, value in fields:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"' % key)
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  for key, filename, value in files:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"; '
+                 'filename="%s"' % (key, filename))
+    lines.append('Content-Type: %s' % _GetMIMEType(filename))
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  lines.append('--' + BOUNDARY + '--')
+  lines.append('')
+  body = CRLF.join(lines)
+  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+  return content_type, body
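+
+# Example (illustrative):
+#   content_type, body = _EncodeMultipartFormData(
+#       [('builder', 'Webkit')], [('file', 'results.json', '{}')])
+#   # body contains one form-data part per field/file, separated by
+#   # BOUNDARY, and content_type is
+#   # 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-'.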
diff --git a/src/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
new file mode 100644
index 0000000..d6aee05
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
@@ -0,0 +1,213 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
+#
+
+import unittest
+import json
+
+from pylib.results.flakiness_dashboard import json_results_generator
+
+
+class JSONGeneratorTest(unittest.TestCase):
+
+  def setUp(self):
+    self.builder_name = 'DUMMY_BUILDER_NAME'
+    self.build_name = 'DUMMY_BUILD_NAME'
+    self.build_number = 'DUMMY_BUILDER_NUMBER'
+
+    # For archived results.
+    self._json = None
+    self._num_runs = 0
+    self._tests_set = set([])
+    self._test_timings = {}
+    self._failed_count_map = {}
+
+    self._PASS_count = 0
+    self._DISABLED_count = 0
+    self._FLAKY_count = 0
+    self._FAILS_count = 0
+    self._fixable_count = 0
+
+    self._orig_write_json = json_results_generator.WriteJSON
+
+    # unused arguments ... pylint: disable=W0613
+    def _WriteJSONStub(json_object, file_path, callback=None):
+      pass
+
+    json_results_generator.WriteJSON = _WriteJSONStub
+
+  def tearDown(self):
+    json_results_generator.WriteJSON = self._orig_write_json
+
+  def _TestJSONGeneration(self, passed_tests_list, failed_tests_list):
+    tests_set = set(passed_tests_list) | set(failed_tests_list)
+
+    DISABLED_tests = set([t for t in tests_set
+                          if t.startswith('DISABLED_')])
+    FLAKY_tests = set([t for t in tests_set
+                       if t.startswith('FLAKY_')])
+    FAILS_tests = set([t for t in tests_set
+                       if t.startswith('FAILS_')])
+    PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests)
+
+    failed_tests = set(failed_tests_list) - DISABLED_tests
+    failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    test_timings = {}
+    for i, test in enumerate(tests_set):
+      test_timings[test] = float(self._num_runs * 100 + i)
+
+    test_results_map = dict()
+    for test in tests_set:
+      test_results_map[test] = json_results_generator.TestResult(
+          test, failed=(test in failed_tests),
+          elapsed_time=test_timings[test])
+
+    generator = json_results_generator.JSONResultsGeneratorBase(
+        self.builder_name, self.build_name, self.build_number,
+        '',
+        None,   # don't fetch past json results archive
+        test_results_map)
+
+    # Test incremental json results
+    incremental_json = generator.GetJSON()
+    self._VerifyJSONResults(
+        tests_set,
+        test_timings,
+        failed_count_map,
+        len(PASS_tests),
+        len(DISABLED_tests),
+        len(FLAKY_tests),
+        len(DISABLED_tests | failed_tests),
+        incremental_json,
+        1)
+
+    # We don't verify the results here, but at least we make sure the code
+    # runs without errors.
+    generator.GenerateJSONOutput()
+    generator.GenerateTimesMSFile()
+
+  def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map,
+                         PASS_count, DISABLED_count, FLAKY_count,
+                         fixable_count, json_obj, num_runs):
+    # Aliasing to a short name for better access to its constants.
+    JRG = json_results_generator.JSONResultsGeneratorBase
+
+    self.assertIn(JRG.VERSION_KEY, json_obj)
+    self.assertIn(self.builder_name, json_obj)
+
+    buildinfo = json_obj[self.builder_name]
+    self.assertIn(JRG.FIXABLE, buildinfo)
+    self.assertIn(JRG.TESTS, buildinfo)
+    self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs)
+    self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number)
+
+    if tests_set or DISABLED_count:
+      fixable = {}
+      for fixable_items in buildinfo[JRG.FIXABLE]:
+        for (result_type, count) in fixable_items.iteritems():
+          if result_type in fixable:
+            fixable[result_type] = fixable[result_type] + count
+          else:
+            fixable[result_type] = count
+
+      if PASS_count:
+        self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count)
+      else:
+        self.assertTrue(JRG.PASS_RESULT not in fixable or
+                        fixable[JRG.PASS_RESULT] == 0)
+      if DISABLED_count:
+        self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count)
+      else:
+        self.assertTrue(JRG.SKIP_RESULT not in fixable or
+                        fixable[JRG.SKIP_RESULT] == 0)
+      if FLAKY_count:
+        self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count)
+      else:
+        self.assertTrue(JRG.FLAKY_RESULT not in fixable or
+                        fixable[JRG.FLAKY_RESULT] == 0)
+
+    if failed_count_map:
+      tests = buildinfo[JRG.TESTS]
+      for test_name in failed_count_map.iterkeys():
+        test = self._FindTestInTrie(test_name, tests)
+
+        failed = 0
+        for result in test[JRG.RESULTS]:
+          if result[1] == JRG.FAIL_RESULT:
+            failed += result[0]
+        self.assertEqual(failed_count_map[test_name], failed)
+
+        timing_count = 0
+        for timings in test[JRG.TIMES]:
+          if timings[1] == test_timings[test_name]:
+            timing_count = timings[0]
+        self.assertEqual(1, timing_count)
+
+    if fixable_count:
+      self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count)
+
+  def _FindTestInTrie(self, path, trie):
+    nodes = path.split('/')
+    sub_trie = trie
+    for node in nodes:
+      self.assertIn(node, sub_trie)
+      sub_trie = sub_trie[node]
+    return sub_trie
+
+  def testJSONGeneration(self):
+    self._TestJSONGeneration([], [])
+    self._TestJSONGeneration(['A1', 'B1'], [])
+    self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2'])
+    self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], [])
+    self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4'])
+    self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5'])
+    self._TestJSONGeneration(
+        ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'],
+        ['FAILS_D6'])
+
+    # Generate JSON with the same test sets. (Both incremental results and
+    # archived results must be updated appropriately.)
+    self._TestJSONGeneration(
+        ['A', 'FLAKY_B', 'DISABLED_C'],
+        ['FAILS_D', 'FLAKY_E'])
+    self._TestJSONGeneration(
+        ['A', 'DISABLED_C', 'FLAKY_E'],
+        ['FLAKY_B', 'FAILS_D'])
+    self._TestJSONGeneration(
+        ['FLAKY_B', 'DISABLED_C', 'FAILS_D'],
+        ['A', 'FLAKY_E'])
+
+  def testHierarchicalJSONGeneration(self):
+    # FIXME: Re-work tests to be more comprehensible and comprehensive.
+    self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C'])
+
+  def testTestTimingsTrie(self):
+    individual_test_timings = []
+    individual_test_timings.append(
+        json_results_generator.TestResult(
+            'foo/bar/baz.html',
+            elapsed_time=1.2))
+    individual_test_timings.append(
+        json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
+    trie = json_results_generator.TestTimingsTrie(individual_test_timings)
+
+    expected_trie = {
+        'bar.html': 0,
+        'foo': {
+            'bar': {
+                'baz.html': 1200,
+            }
+        }
+    }
+
+    self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
diff --git a/src/build/android/pylib/results/flakiness_dashboard/results_uploader.py b/src/build/android/pylib/results/flakiness_dashboard/results_uploader.py
new file mode 100644
index 0000000..b68a898
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/results_uploader.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+# pylint: disable=E1002,R0201
+
+import logging
+import os
+import shutil
+import tempfile
+import xml.dom.minidom
+import xml.parsers.expat
+
+
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+from pylib.results.flakiness_dashboard import json_results_generator
+from pylib.utils import repo_utils
+
+
+class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
+  """Writes test results to a JSON file and handles uploading that file to
+  the test results server.
+  """
+  def __init__(self, builder_name, build_name, build_number, tmp_folder,
+               test_results_map, test_results_server, test_type, master_name):
+    super(JSONResultsGenerator, self).__init__(
+        builder_name=builder_name,
+        build_name=build_name,
+        build_number=build_number,
+        results_file_base_path=tmp_folder,
+        builder_base_url=None,
+        test_results_map=test_results_map,
+        svn_repositories=(('webkit', 'third_party/WebKit'),
+                          ('chrome', '.')),
+        test_results_server=test_results_server,
+        test_type=test_type,
+        master_name=master_name)
+
+  #override
+  def _GetModifierChar(self, test_name):
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    return self._test_results_map[test_name].modifier
+
+  #override
+  def _GetSVNRevision(self, in_directory):
+    """Returns the git/svn revision for the given directory.
+
+    Args:
+      in_directory: The directory relative to src.
+    """
+    def _is_git_directory(in_directory):
+      """Returns true if the given directory is in a git repository.
+
+      Args:
+        in_directory: The directory path to be tested.
+      """
+      if os.path.exists(os.path.join(in_directory, '.git')):
+        return True
+      parent = os.path.dirname(in_directory)
+      if parent == host_paths.DIR_SOURCE_ROOT or parent == in_directory:
+        return False
+      return _is_git_directory(parent)
+
+    in_directory = os.path.join(host_paths.DIR_SOURCE_ROOT, in_directory)
+
+    if not os.path.exists(os.path.join(in_directory, '.svn')):
+      if _is_git_directory(in_directory):
+        return repo_utils.GetGitHeadSHA1(in_directory)
+      else:
+        return ''
+
+    output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory)
+    try:
+      dom = xml.dom.minidom.parseString(output)
+      return dom.getElementsByTagName('entry')[0].getAttribute('revision')
+    except xml.parsers.expat.ExpatError:
+      return ''
+
+
+class ResultsUploader(object):
+  """Handles uploading buildbot tests results to the flakiness dashboard."""
+  def __init__(self, tests_type):
+    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
+    self._master_name = os.environ.get('BUILDBOT_MASTERNAME')
+    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    self._tests_type = tests_type
+    self._build_name = None
+
+    if not self._build_number or not self._builder_name:
+      raise Exception('You should not be uploading test results to the '
+                      'server from your local machine.')
+
+    upstream = (tests_type != 'Chromium_Android_Instrumentation')
+    if not upstream:
+      self._build_name = 'chromium-android'
+      buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
+      if not buildbot_branch:
+        buildbot_branch = 'master'
+      else:
+        # Ensure there's no leading "origin/"
+        buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:]
+      self._master_name = '%s-%s' % (self._build_name, buildbot_branch)
+
+    self._test_results_map = {}
+
+  def AddResults(self, test_results):
+    # TODO(frankf): Differentiate between fail/crash/timeouts.
+    conversion_map = [
+        (test_results.GetPass(), False,
+            json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
+        (test_results.GetFail(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetCrash(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetTimeout(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetUnknown(), True,
+            json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
+        ]
+
+    for results_list, failed, modifier in conversion_map:
+      for single_test_result in results_list:
+        test_result = json_results_generator.TestResult(
+            test=single_test_result.GetName(),
+            failed=failed,
+            elapsed_time=single_test_result.GetDuration() / 1000)
+        # The WebKit TestResult object sets the modifier based on the test
+        # name. Since we don't use the same test naming convention as
+        # WebKit, the modifier will be wrong, so we need to overwrite it.
+        test_result.modifier = modifier
+
+        self._test_results_map[single_test_result.GetName()] = test_result
+
+  def Upload(self, test_results_server):
+    if not self._test_results_map:
+      return
+
+    tmp_folder = tempfile.mkdtemp()
+
+    try:
+      results_generator = JSONResultsGenerator(
+          builder_name=self._builder_name,
+          build_name=self._build_name,
+          build_number=self._build_number,
+          tmp_folder=tmp_folder,
+          test_results_map=self._test_results_map,
+          test_results_server=test_results_server,
+          test_type=self._tests_type,
+          master_name=self._master_name)
+
+      json_files = ["incremental_results.json", "times_ms.json"]
+      results_generator.GenerateJSONOutput()
+      results_generator.GenerateTimesMSFile()
+      results_generator.UploadJSONFiles(json_files)
+    except Exception as e: # pylint: disable=broad-except
+      logging.error("Uploading results to test server failed: %s.", e)
+    finally:
+      shutil.rmtree(tmp_folder)
+
+
+def Upload(results, flakiness_dashboard_server, test_type):
+  """Reports test results to the flakiness dashboard for Chrome for Android.
+
+  Args:
+    results: test results.
+    flakiness_dashboard_server: the server to upload the results to.
+    test_type: the type of the tests (as displayed by the flakiness dashboard).
+  """
+  uploader = ResultsUploader(test_type)
+  uploader.AddResults(results)
+  uploader.Upload(flakiness_dashboard_server)
diff --git a/src/build/android/pylib/results/json_results.py b/src/build/android/pylib/results/json_results.py
new file mode 100644
index 0000000..9b3bcb5
--- /dev/null
+++ b/src/build/android/pylib/results/json_results.py
@@ -0,0 +1,229 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import itertools
+import json
+import logging
+import time
+
+import six
+
+from pylib.base import base_test_result
+
+def GenerateResultsDict(test_run_results, global_tags=None):
+  """Create a results dict from |test_run_results| suitable for writing to JSON.
+  Args:
+    test_run_results: a list of base_test_result.TestRunResults objects.
+  Returns:
+    A results dict that mirrors the one generated by
+      base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+  """
+  # Example json output.
+  # {
+  #   "global_tags": [],
+  #   "all_tests": [
+  #     "test1",
+  #     "test2",
+  #    ],
+  #   "disabled_tests": [],
+  #   "per_iteration_data": [
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #         ...
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #         ...
+  #       ],
+  #     },
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #     },
+  #     ...
+  #   ],
+  # }
+
+  all_tests = set()
+  per_iteration_data = []
+  test_run_links = {}
+
+  for test_run_result in test_run_results:
+    iteration_data = collections.defaultdict(list)
+    if isinstance(test_run_result, list):
+      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+      for tr in test_run_result:
+        test_run_links.update(tr.GetLinks())
+    else:
+      results_iterable = test_run_result.GetAll()
+      test_run_links.update(test_run_result.GetLinks())
+
+    for r in results_iterable:
+      result_dict = {
+          'status': r.GetType(),
+          'elapsed_time_ms': r.GetDuration(),
+          'output_snippet': six.ensure_text(r.GetLog(), errors='replace'),
+          'losless_snippet': True,
+          'output_snippet_base64': '',
+          'links': r.GetLinks(),
+      }
+      iteration_data[r.GetName()].append(result_dict)
+
+    all_tests = all_tests.union(set(six.iterkeys(iteration_data)))
+    per_iteration_data.append(iteration_data)
+
+  return {
+    'global_tags': global_tags or [],
+    'all_tests': sorted(list(all_tests)),
+    # TODO(jbudorick): Add support for disabled tests within base_test_result.
+    'disabled_tests': [],
+    'per_iteration_data': per_iteration_data,
+    'links': test_run_links,
+  }
+
+
+def GenerateJsonTestResultFormatDict(test_run_results, interrupted):
+  """Create a results dict from |test_run_results| suitable for writing to JSON.
+
+  Args:
+    test_run_results: a list of base_test_result.TestRunResults objects.
+    interrupted: True if tests were interrupted, e.g. by a timeout while
+        listing tests.
+  Returns:
+    A results dict that mirrors the standard JSON Test Results Format.
+  """
+
+  tests = {}
+  counts = {'PASS': 0, 'FAIL': 0}
+
+  for test_run_result in test_run_results:
+    if isinstance(test_run_result, list):
+      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+    else:
+      results_iterable = test_run_result.GetAll()
+
+    for r in results_iterable:
+      element = tests
+      for key in r.GetName().split('.'):
+        if key not in element:
+          element[key] = {}
+        element = element[key]
+
+      element['expected'] = 'PASS'
+
+      result = ('PASS' if r.GetType() == base_test_result.ResultType.PASS
+                else 'FAIL')
+
+      if 'actual' in element:
+        element['actual'] += ' ' + result
+      else:
+        counts[result] += 1
+        element['actual'] = result
+        if result == 'FAIL':
+          element['is_unexpected'] = True
+
+      if r.GetDuration() != 0:
+        element['time'] = r.GetDuration()
+
+  # Fill in required fields.
+  return {
+      'interrupted': interrupted,
+      'num_failures_by_type': counts,
+      'path_delimiter': '.',
+      'seconds_since_epoch': time.time(),
+      'tests': tests,
+      'version': 3,
+  }
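+
+# Illustrative shape of the returned dict (assumed inputs: one passing test
+# 'A.b' that took 10 ms and one failing test 'A.c'; the timestamp is just an
+# example value):
+#   {
+#     'interrupted': False,
+#     'num_failures_by_type': {'PASS': 1, 'FAIL': 1},
+#     'path_delimiter': '.',
+#     'seconds_since_epoch': 1600000000.0,
+#     'tests': {'A': {'b': {'expected': 'PASS', 'actual': 'PASS',
+#                           'time': 10},
+#                     'c': {'expected': 'PASS', 'actual': 'FAIL',
+#                           'is_unexpected': True}}},
+#     'version': 3,
+#   }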
+
+
+def GenerateJsonResultsFile(test_run_result, file_path, global_tags=None,
+                            **kwargs):
+  """Write |test_run_result| to JSON.
+
+  This emulates the format of the JSON emitted by
+  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    file_path: The path to the JSON file to write.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(json.dumps(
+        GenerateResultsDict(test_run_result, global_tags=global_tags),
+        **kwargs))
+    logging.info('Generated json results file at %s', file_path)
+
+
+def GenerateJsonTestResultFormatFile(test_run_result, interrupted, file_path,
+                                     **kwargs):
+  """Write |test_run_result| to JSON.
+
+  This uses the official Chromium Test Results Format.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    interrupted: True if tests were interrupted, e.g. by a timeout while
+        listing tests.
+    file_path: The path to the JSON file to write.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(
+        json.dumps(
+            GenerateJsonTestResultFormatDict(test_run_result, interrupted),
+            **kwargs))
+    logging.info('Generated json results file at %s', file_path)
+
+
+def ParseResultsFromJson(json_results):
+  """Creates a list of BaseTestResult objects from JSON.
+
+  Args:
+    json_results: A JSON dict in the format created by
+                  GenerateJsonResultsFile.
+  """
+
+  def string_as_status(s):
+    if s in base_test_result.ResultType.GetTypes():
+      return s
+    return base_test_result.ResultType.UNKNOWN
+
+  results_list = []
+  testsuite_runs = json_results['per_iteration_data']
+  for testsuite_run in testsuite_runs:
+    for test, test_runs in testsuite_run.iteritems():
+      results_list.extend(
+          [base_test_result.BaseTestResult(test,
+                                           string_as_status(tr['status']),
+                                           duration=tr['elapsed_time_ms'])
+           for tr in test_runs])
+  return results_list
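+
+# Round-trip sketch (illustrative): results serialized by
+# GenerateResultsDict can be fed back through ParseResultsFromJson, e.g.
+#   parsed = ParseResultsFromJson(
+#       {'per_iteration_data': [{'test.Foo': [
+#           {'status': 'SUCCESS', 'elapsed_time_ms': 5}]}]})
+#   # parsed[0].GetName() == 'test.Foo'; unknown statuses map to
+#   # ResultType.UNKNOWN via string_as_status().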
diff --git a/src/build/android/pylib/results/json_results_test.py b/src/build/android/pylib/results/json_results_test.py
new file mode 100755
index 0000000..6647331
--- /dev/null
+++ b/src/build/android/pylib/results/json_results_test.py
@@ -0,0 +1,292 @@
+#!/usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+
+class JsonResultsTest(unittest.TestCase):
+
+  def testGenerateResultsDict_passedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_skippedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.SKIP)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SKIPPED', test_iteration_result['status'])
+
+  def testGenerateResultsDict_failedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('FAILURE', test_iteration_result['status'])
+
+  def testGenerateResultsDict_duration(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS, duration=123)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('elapsed_time_ms' in test_iteration_result)
+    self.assertEquals(123, test_iteration_result['elapsed_time_ms'])
+
+  def testGenerateResultsDict_multipleResults(self):
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName2', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result1)
+    all_results.AddResult(result2)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName1', 'test.package.TestName2'],
+        results_dict['all_tests'])
+
+    self.assertTrue('per_iteration_data' in results_dict)
+    iterations = results_dict['per_iteration_data']
+    self.assertEquals(1, len(iterations))
+
+    expected_tests = set([
+        'test.package.TestName1',
+        'test.package.TestName2',
+    ])
+
+    for test_name, iteration_result in iterations[0].iteritems():
+      self.assertTrue(test_name in expected_tests)
+      expected_tests.remove(test_name)
+      self.assertEquals(1, len(iteration_result))
+
+      test_iteration_result = iteration_result[0]
+      self.assertTrue('status' in test_iteration_result)
+      self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_passOnRetry(self):
+    raw_results = []
+
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.FAIL)
+    run_results1 = base_test_result.TestRunResults()
+    run_results1.AddResult(result1)
+    raw_results.append(run_results1)
+
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    run_results2 = base_test_result.TestRunResults()
+    run_results2.AddResult(result2)
+    raw_results.append(run_results2)
+
+    results_dict = json_results.GenerateResultsDict([raw_results])
+    self.assertEquals(['test.package.TestName1'], results_dict['all_tests'])
+
+    # Check that there's only one iteration.
+    self.assertIn('per_iteration_data', results_dict)
+    iterations = results_dict['per_iteration_data']
+    self.assertEquals(1, len(iterations))
+
+    # Check that test.package.TestName1 is the only test in the iteration.
+    self.assertEquals(1, len(iterations[0]))
+    self.assertIn('test.package.TestName1', iterations[0])
+
+    # Check that there are two results for test.package.TestName1.
+    actual_test_results = iterations[0]['test.package.TestName1']
+    self.assertEquals(2, len(actual_test_results))
+
+    # Check that the first result is a failure.
+    self.assertIn('status', actual_test_results[0])
+    self.assertEquals('FAILURE', actual_test_results[0]['status'])
+
+    # Check that the second result is a success.
+    self.assertIn('status', actual_test_results[1])
+    self.assertEquals('SUCCESS', actual_test_results[1]['status'])
+
+  def testGenerateResultsDict_globalTags(self):
+    raw_results = []
+    global_tags = ['UNRELIABLE_RESULTS']
+
+    results_dict = json_results.GenerateResultsDict(
+        [raw_results], global_tags=global_tags)
+    self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags'])
+
+  def testGenerateResultsDict_loslessSnippet(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+    log = 'blah-blah'
+    result.SetLog(log)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('losless_snippet' in test_iteration_result)
+    self.assertTrue(test_iteration_result['losless_snippet'])
+    self.assertTrue('output_snippet' in test_iteration_result)
+    self.assertEquals(log, test_iteration_result['output_snippet'])
+    self.assertTrue('output_snippet_base64' in test_iteration_result)
+    self.assertEquals('', test_iteration_result['output_snippet_base64'])
+
+  def testGenerateJsonTestResultFormatDict_passedResult(self):
+    result = base_test_result.BaseTestResult('test.package.TestName',
+                                             base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict([all_results],
+                                                                 False)
+    self.assertEquals(1, len(results_dict['tests']))
+    self.assertEquals(1, len(results_dict['tests']['test']))
+    self.assertEquals(1, len(results_dict['tests']['test']['package']))
+    self.assertEquals(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEquals(
+        'PASS', results_dict['tests']['test']['package']['TestName']['actual'])
+
+    # Note: technically a missing entry counts as zero.
+    self.assertEquals(1, results_dict['num_failures_by_type']['PASS'])
+    self.assertEquals(0, results_dict['num_failures_by_type']['FAIL'])
+
+  def testGenerateJsonTestResultFormatDict_failedResult(self):
+    result = base_test_result.BaseTestResult('test.package.TestName',
+                                             base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict([all_results],
+                                                                 False)
+    self.assertEquals(1, len(results_dict['tests']))
+    self.assertEquals(1, len(results_dict['tests']['test']))
+    self.assertEquals(1, len(results_dict['tests']['test']['package']))
+    self.assertEquals(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEquals(
+        'FAIL', results_dict['tests']['test']['package']['TestName']['actual'])
+    self.assertEquals(
+        True,
+        results_dict['tests']['test']['package']['TestName']['is_unexpected'])
+    self.assertEquals(2, len(results_dict['num_failures_by_type']))
+
+    # Note: technically a missing entry counts as zero.
+    self.assertEquals(0, results_dict['num_failures_by_type']['PASS'])
+    self.assertEquals(1, results_dict['num_failures_by_type']['FAIL'])
+
+  def testGenerateJsonTestResultFormatDict_failedResultWithRetry(self):
+    result_1 = base_test_result.BaseTestResult('test.package.TestName',
+                                               base_test_result.ResultType.FAIL)
+    run_results_1 = base_test_result.TestRunResults()
+    run_results_1.AddResult(result_1)
+
+    # Simulate a second retry with failure.
+    result_2 = base_test_result.BaseTestResult('test.package.TestName',
+                                               base_test_result.ResultType.FAIL)
+    run_results_2 = base_test_result.TestRunResults()
+    run_results_2.AddResult(result_2)
+
+    all_results = [run_results_1, run_results_2]
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict(
+        all_results, False)
+    self.assertEquals(1, len(results_dict['tests']))
+    self.assertEquals(1, len(results_dict['tests']['test']))
+    self.assertEquals(1, len(results_dict['tests']['test']['package']))
+    self.assertEquals(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEquals(
+        'FAIL FAIL',
+        results_dict['tests']['test']['package']['TestName']['actual'])
+    self.assertEquals(
+        True,
+        results_dict['tests']['test']['package']['TestName']['is_unexpected'])
+
+    # Note: technically a missing entry counts as zero.
+    self.assertEquals(2, len(results_dict['num_failures_by_type']))
+    self.assertEquals(0, results_dict['num_failures_by_type']['PASS'])
+
+    # According to the spec: If a test was run more than once, only the first
+    # invocation's result is included in the totals.
+    self.assertEquals(1, results_dict['num_failures_by_type']['FAIL'])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/results/presentation/__init__.py b/src/build/android/pylib/results/presentation/__init__.py
new file mode 100644
index 0000000..a22a6ee
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/results/presentation/javascript/main_html.js b/src/build/android/pylib/results/presentation/javascript/main_html.js
new file mode 100644
index 0000000..3d94663
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/javascript/main_html.js
@@ -0,0 +1,193 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+function getArguments() {
+  // Returns the URL arguments as a dictionary.
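+  // e.g. for 'results.html?suite=Foo&run=2' this returns
+  // {suite: 'Foo', run: '2'} (values stay url-encoded strings).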
+  var args = {};
+  var s = location.search;
+  if (s) {
+    var vals = s.substring(1).split('&');
+    for (var i = 0; i < vals.length; i++) {
+      var pair = vals[i].split('=');
+      args[pair[0]] = pair[1];
+    }
+  }
+  return args;
+}
+
+function showSuiteTable(show_the_table) {
+    document.getElementById('suite-table').style.display = (
+        show_the_table ? 'table' : 'none');
+}
+
+function showTestTable(show_the_table) {
+    document.getElementById('test-table').style.display = (
+        show_the_table ? 'table' : 'none');
+}
+
+function showTestsOfOneSuiteOnly(suite_name) {
+  setTitle('Test Results of Suite: ' + suite_name);
+  var show_all = (suite_name == 'TOTAL');
+  var testTableBlocks = document.getElementById('test-table')
+      .getElementsByClassName('row_block');
+  Array.prototype.slice.call(testTableBlocks)
+      .forEach(function(testTableBlock) {
+        if (!show_all) {
+          var table_block_in_suite = (testTableBlock.firstElementChild
+            .firstElementChild.firstElementChild.innerHTML)
+            .startsWith(suite_name);
+          if (!table_block_in_suite) {
+            testTableBlock.style.display = 'none';
+            return;
+          }
+        }
+        testTableBlock.style.display = 'table-row-group';
+      });
+  showTestTable(true);
+  showSuiteTable(false);
+  window.scrollTo(0, 0);
+}
+
+function showTestsOfOneSuiteOnlyWithNewState(suite_name) {
+  showTestsOfOneSuiteOnly(suite_name);
+  history.pushState({suite: suite_name}, suite_name, '');
+}
+
+function showSuiteTableOnly() {
+  setTitle('Suites Summary');
+  showTestTable(false);
+  showSuiteTable(true);
+  window.scrollTo(0, 0);
+}
+
+function showSuiteTableOnlyWithReplaceState() {
+  showSuiteTableOnly();
+  history.replaceState({}, 'suite_table', '');
+}
+
+function setBrowserBackButtonLogic() {
+  window.onpopstate = function(event) {
+    if (!event.state || !event.state.suite) {
+      showSuiteTableOnly();
+    } else {
+      showTestsOfOneSuiteOnly(event.state.suite);
+    }
+  };
+}
+
+function setTitle(title) {
+  document.getElementById('summary-header').textContent = title;
+}
+
+function sortByColumn(head) {
+  var table = head.parentNode.parentNode.parentNode;
+  var rowBlocks = Array.prototype.slice.call(
+      table.getElementsByTagName('tbody'));
+
+  // Determine whether to asc or desc and set arrows.
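+  // dataset.ascSorted is a tri-state string per header: '1' (sorted
+  // ascending), '-1' (sorted descending) or '0' (unsorted); clicking the
+  // currently sorted column flips its direction.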
+  var headers = head.parentNode.getElementsByTagName('th');
+  var headIndex = Array.prototype.slice.call(headers).indexOf(head);
+  var asc = -1;
+  for (var i = 0; i < headers.length; i++) {
+    if (headers[i].dataset.ascSorted != 0) {
+      if (headers[i].dataset.ascSorted == 1) {
+          headers[i].getElementsByClassName('up')[0]
+              .style.display = 'none';
+      } else {
+        headers[i].getElementsByClassName('down')[0]
+            .style.display = 'none';
+      }
+      if (headers[i] == head) {
+        asc = headers[i].dataset.ascSorted * -1;
+      } else {
+        headers[i].dataset.ascSorted = 0;
+      }
+      break;
+    }
+  }
+  headers[headIndex].dataset.ascSorted = asc;
+  if (asc == 1) {
+      headers[headIndex].getElementsByClassName('up')[0]
+          .style.display = 'inline';
+  } else {
+      headers[headIndex].getElementsByClassName('down')[0]
+          .style.display = 'inline';
+  }
+
+  // Sort the row blocks by the clicked column (headIndex) and order (asc).
+  rowBlocks.sort(function (a, b) {
+    if (a.style.display == 'none') {
+      return -1;
+    } else if (b.style.display == 'none') {
+      return 1;
+    }
+    var a_rows = Array.prototype.slice.call(a.children);
+    var b_rows = Array.prototype.slice.call(b.children);
+    if (head.className == "text") {
+      // If sorting by text, we only compare the entry on the first row.
+      var aInnerHTML = a_rows[0].children[headIndex].innerHTML;
+      var bInnerHTML = b_rows[0].children[headIndex].innerHTML;
+      return (aInnerHTML == bInnerHTML) ? 0 : (
+          (aInnerHTML > bInnerHTML) ? asc : -1 * asc);
+    } else if (head.className == "number") {
+      // If sorting by number, for example, duration,
+      // we will sum up the durations of different test runs
+      // for one specific test case and sort by the sum.
+      var avalue = 0;
+      var bvalue = 0;
+      a_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        avalue += Number(row.children[index].innerHTML);
+      });
+      b_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        bvalue += Number(row.children[index].innerHTML);
+      });
+    } else if (head.className == "flaky") {
+      // Flakiness = (#total - #success - #skipped) / (#total - #skipped)
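+      // Worked example: 5 runs with 3 SUCCESS and 1 SKIPPED give
+      // (5 - 3 - 1) / (5 - 1) = 0.25.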
+      var a_success_or_skipped = 0;
+      var a_skipped = 0;
+      var b_success_or_skipped = 0;
+      var b_skipped = 0;
+      a_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        var status = row.children[index].innerHTML.trim();
+        if (status == 'SUCCESS') {
+          a_success_or_skipped += 1;
+        }
+        if (status == 'SKIPPED') {
+          a_success_or_skipped += 1;
+          a_skipped += 1;
+        }
+      });
+      b_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        var status = row.children[index].innerHTML.trim();
+        if (status == 'SUCCESS') {
+          b_success_or_skipped += 1;
+        }
+        if (status == 'SKIPPED') {
+          b_success_or_skipped += 1;
+          b_skipped += 1;
+        }
+      });
+      var atotal_minus_skipped = a_rows.length - a_skipped;
+      var btotal_minus_skipped = b_rows.length - b_skipped;
+
+      var avalue = ((atotal_minus_skipped == 0) ? -1 :
+          (a_rows.length - a_success_or_skipped) / atotal_minus_skipped);
+      var bvalue = ((btotal_minus_skipped == 0) ? -1 :
+          (b_rows.length - b_success_or_skipped) / btotal_minus_skipped);
+    }
+    return asc * (avalue - bvalue);
+  });
+
+  for (var i = 0; i < rowBlocks.length; i++) {
+    table.appendChild(rowBlocks[i]);
+  }
+}
+
+function sortSuiteTableByFailedTestCases() {
+  sortByColumn(document.getElementById('number_fail_tests'));
+}
diff --git a/src/build/android/pylib/results/presentation/standard_gtest_merge.py b/src/build/android/pylib/results/presentation/standard_gtest_merge.py
new file mode 100755
index 0000000..58a2936
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/standard_gtest_merge.py
@@ -0,0 +1,173 @@
+#! /usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import sys
+
+
+def merge_shard_results(summary_json, jsons_to_merge):
+  """Reads JSON test output from all shards and combines them into one.
+
+  Returns dict with merged test output on success or None on failure. Emits
+  annotations.
+  """
+  try:
+    with open(summary_json) as f:
+      summary = json.load(f)
+  except (IOError, ValueError):
+    raise Exception('Summary json cannot be loaded.')
+
+  # Merge all JSON files together. Keep track of missing shards.
+  merged = {
+    'all_tests': set(),
+    'disabled_tests': set(),
+    'global_tags': set(),
+    'missing_shards': [],
+    'per_iteration_data': [],
+    'swarming_summary': summary,
+    'links': set()
+  }
+  for index, result in enumerate(summary['shards']):
+    if result is None:
+      merged['missing_shards'].append(index)
+      continue
+
+    # Author note: this code path doesn't trigger convert_to_old_format() in
+    # client/swarming.py, which means the state enum is saved in its string
+    # name form, not in the number form.
+    state = result.get('state')
+    if state == u'BOT_DIED':
+      print(
+          'Shard #%d had a Swarming internal failure' % index, file=sys.stderr)
+    elif state == u'EXPIRED':
+      print('There wasn\'t enough capacity to run your test', file=sys.stderr)
+    elif state == u'TIMED_OUT':
+      print('Test runtime exceeded allocated time. '
+            'Either it ran for too long (hard timeout) or it didn\'t produce '
+            'I/O for an extended period of time (I/O timeout)',
+            file=sys.stderr)
+    elif state != u'COMPLETED':
+      print('Invalid Swarming task state: %s' % state, file=sys.stderr)
+
+    json_data, err_msg = load_shard_json(index, result.get('task_id'),
+                                         jsons_to_merge)
+    if json_data:
+      # Set-like fields.
+      for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+        merged[key].update(json_data.get(key, []))
+
+      # 'per_iteration_data' is a list of dicts. Dicts should be merged
+      # together, not the 'per_iteration_data' list itself.
+      merged['per_iteration_data'] = merge_list_of_dicts(
+          merged['per_iteration_data'], json_data.get('per_iteration_data', []))
+    else:
+      merged['missing_shards'].append(index)
+      print('No result was found: %s' % err_msg, file=sys.stderr)
+
+  # If some shards are missing, make it known. Continue parsing anyway. Step
+  # should be red anyway, since swarming.py return non-zero exit code in that
+  # case.
+  if merged['missing_shards']:
+    as_str = ', '.join([str(shard) for shard in merged['missing_shards']])
+    print('some shards did not complete: %s' % as_str, file=sys.stderr)
+    # Not all tests ran; the combined JSON summary cannot be trusted.
+    merged['global_tags'].add('UNRELIABLE_RESULTS')
+
+  # Convert to jsonish dict.
+  for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+    merged[key] = sorted(merged[key])
+  return merged
+
+
+OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB
+
+
+def load_shard_json(index, task_id, jsons_to_merge):
+  """Reads JSON output of the specified shard.
+
+  Args:
+    index: The index of the shard to load data for; used with the old API,
+        where each shard's output lives in a directory named after its index.
+    task_id: The task id of the shard to load data for; used with the new
+        API, where the directory is named after the task id.
+    jsons_to_merge: Paths of candidate output JSON files from all shards.
+
+  Returns: A tuple containing:
+    * The contents of path, deserialized into a python object.
+    * An error string.
+    (exactly one of the tuple elements will be non-None).
+  """
+  matching_json_files = [
+      j for j in jsons_to_merge
+      if (os.path.basename(j) == 'output.json' and
+          (os.path.basename(os.path.dirname(j)) == str(index) or
+           os.path.basename(os.path.dirname(j)) == task_id))]
+
+  if not matching_json_files:
+    print('shard %s test output missing' % index, file=sys.stderr)
+    return (None, 'shard %s test output was missing' % index)
+  elif len(matching_json_files) > 1:
+    print('duplicate test output for shard %s' % index, file=sys.stderr)
+    return (None, 'shard %s test output was duplicated' % index)
+
+  path = matching_json_files[0]
+
+  try:
+    filesize = os.stat(path).st_size
+    if filesize > OUTPUT_JSON_SIZE_LIMIT:
+      print(
+          'output.json is %d bytes. Max size is %d' % (filesize,
+                                                       OUTPUT_JSON_SIZE_LIMIT),
+          file=sys.stderr)
+      return (None, 'shard %s test output exceeded the size limit' % index)
+
+    with open(path) as f:
+      return (json.load(f), None)
+  except (IOError, ValueError, OSError) as e:
+    print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr)
+    print('%s: %s' % (type(e).__name__, e), file=sys.stderr)
+
+    return (None, 'shard %s test output was missing or invalid' % index)
+
+
+def merge_list_of_dicts(left, right):
+  """Merges dicts left[0] with right[0], left[1] with right[1], etc."""
+  output = []
+  for i in xrange(max(len(left), len(right))):
+    left_dict = left[i] if i < len(left) else {}
+    right_dict = right[i] if i < len(right) else {}
+    merged_dict = left_dict.copy()
+    merged_dict.update(right_dict)
+    output.append(merged_dict)
+  return output
+
+
+def standard_gtest_merge(
+    output_json, summary_json, jsons_to_merge):
+
+  output = merge_shard_results(summary_json, jsons_to_merge)
+  with open(output_json, 'wb') as f:
+    json.dump(output, f)
+
+  return 0
+
+
+def main(raw_args):
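+  # Typical invocation (paths illustrative); load_shard_json() expects each
+  # shard's output at <shard-index>/output.json or <task-id>/output.json:
+  #   standard_gtest_merge.py --summary-json summary.json \
+  #       -o merged.json 0/output.json 1/output.json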
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--summary-json')
+  parser.add_argument('-o', '--output-json', required=True)
+  parser.add_argument('jsons_to_merge', nargs='*')
+
+  args = parser.parse_args(raw_args)
+
+  return standard_gtest_merge(
+      args.output_json, args.summary_json, args.jsons_to_merge)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/pylib/results/presentation/template/main.html b/src/build/android/pylib/results/presentation/template/main.html
new file mode 100644
index 0000000..e30d7d3
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/template/main.html
@@ -0,0 +1,93 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+    <style>
+      body {
+        background-color: #fff;
+        color: #333;
+        font-family: Verdana, sans-serif;
+        font-size: 10px;
+        margin-left: 30px;
+        margin-right: 30px;
+        margin-top: 20px;
+        margin-bottom: 50px;
+        padding: 0;
+      }
+      table, th, td {
+        border: 1px solid black;
+        border-collapse: collapse;
+        text-align: center;
+      }
+      table, td {
+        padding: 0.1em 1em 0.1em 1em;
+      }
+      th {
+        cursor: pointer;
+        padding: 0.2em 1.5em 0.2em 1.5em;
+      }
+      table {
+        width: 100%;
+      }
+      .center {
+        text-align: center;
+      }
+      .left {
+        text-align: left;
+      }
+      a {
+        cursor: pointer;
+        text-decoration: underline;
+      }
+      a:link,a:visited,a:active {
+        color: #444;
+      }
+      .row_block:hover {
+        background-color: #F6F6F6;
+      }
+      .skipped, .success, .failure {
+        border-color: #000000;
+      }
+      .success {
+        color: #000;
+        background-color: #8d4;
+      }
+      .failure {
+        color: #000;
+        background-color: #e88;
+      }
+      .skipped {
+        color: #000;
+        background: #AADDEE;
+      }
+    </style>
+    <script type="text/javascript">
+      {% include "javascript/main_html.js" %}
+    </script>
+  </head>
+  <body>
+    <div>
+      <h2 id="summary-header"></h2>
+      {% for tb_value in tb_values %}
+        {% include 'template/table.html' %}
+      {% endfor %}
+    </div>
+  {% if feedback_url %}
+    <br />
+    <a href="{{feedback_url}}" target="_blank"><b>Feedback</b></a>
+  {%- endif %}
+  <script>
+    sortSuiteTableByFailedTestCases();
+    showSuiteTableOnlyWithReplaceState();
+    // Enable sorting for each column of tables.
+    Array.prototype.slice.call(document.getElementsByTagName('th'))
+        .forEach(function(head) {
+            head.addEventListener(
+                "click",
+                function() { sortByColumn(head); });
+        }
+    );
+    setBrowserBackButtonLogic();
+  </script>
+  </body>
+</html>
diff --git a/src/build/android/pylib/results/presentation/template/table.html b/src/build/android/pylib/results/presentation/template/table.html
new file mode 100644
index 0000000..4240043
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/template/table.html
@@ -0,0 +1,60 @@
+<table id="{{tb_value.table_id}}" style="display:none;">
+  <thead class="heads">
+    <tr>
+      {% for cell in tb_value.table_headers -%}
+        <th class="{{cell.class}}" id="{{cell.data}}" data-asc-sorted=0>
+          {{cell.data}}
+          <span class="up" style="display:none;"> &#8593</span>
+          <span class="down" style="display:none;"> &#8595</span>
+        </th>
+      {%- endfor %}
+    </tr>
+  </thead>
+  {% for block in tb_value.table_row_blocks -%}
+    <tbody class="row_block">
+      {% for row in block -%}
+        <tr class="{{tb_value.table_id}}-body-row">
+          {% for cell in row -%}
+            {% if cell.rowspan -%}
+              <td rowspan="{{cell.rowspan}}" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+            {%- else -%}
+              <td rowspan="1" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+            {%- endif %}
+            {% if cell.cell_type == 'pre' -%}
+              <pre>{{cell.data}}</pre>
+            {%- elif cell.cell_type == 'links' -%}
+              {% for link in cell.links -%}
+                <a href="{{link.href}}" target="{{link.target}}">{{link.data}}</a>
+                {% if not loop.last -%}
+                  <br />
+                {%- endif %}
+              {%- endfor %}
+            {%- elif cell.cell_type == 'action' -%}
+              <a onclick="{{cell.action}}">{{cell.data}}</a>
+            {%- else -%}
+              {{cell.data}}
+            {%- endif %}
+            </td>
+          {%- endfor %}
+        </tr>
+      {%- endfor %}
+    </tbody>
+  {%- endfor %}
+  <tfoot>
+    <tr>
+        {% for cell in tb_value.table_footer -%}
+          <td class="{{tb_value.table_id}}-summary-column-{{loop.index0}} {{cell.class}}">
+            {% if cell.cell_type == 'links' -%}
+              {% for link in cell.links -%}
+                <a href="{{link.href}}" target="{{link.target}}"><b>{{link.data}}</b></a>
+              {%- endfor %}
+            {%- elif cell.cell_type == 'action' -%}
+              <a onclick="{{cell.action}}">{{cell.data}}</a>
+            {%- else -%}
+              <b>{{cell.data}}</b>
+            {%- endif %}
+          </td>
+        {%- endfor %}
+      </tr>
+  </tfoot>
+</table>
diff --git a/src/build/android/pylib/results/presentation/test_results_presentation.py b/src/build/android/pylib/results/presentation/test_results_presentation.py
new file mode 100755
index 0000000..33fae04
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/test_results_presentation.py
@@ -0,0 +1,547 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import json
+import logging
+import tempfile
+import os
+import sys
+import urllib
+
+
+CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
+BASE_DIR = os.path.abspath(os.path.join(
+    CURRENT_DIR, '..', '..', '..', '..', '..'))
+
+sys.path.append(os.path.join(BASE_DIR, 'build', 'android'))
+from pylib.results.presentation import standard_gtest_merge
+from pylib.utils import google_storage_helper  # pylint: disable=import-error
+
+sys.path.append(os.path.join(BASE_DIR, 'third_party'))
+import jinja2  # pylint: disable=import-error
+JINJA_ENVIRONMENT = jinja2.Environment(
+    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
+    autoescape=True)
+
+
+def cell(data, html_class='center'):
+  """Formats table cell data for processing in jinja template."""
+  return {
+    'data': data,
+    'class': html_class,
+  }
+
+
+def pre_cell(data, html_class='center'):
+  """Formats table <pre> cell data for processing in jinja template."""
+  return {
+    'cell_type': 'pre',
+    'data': data,
+    'class': html_class,
+  }
+
+
+class LinkTarget(object):
+  # Opens the linked document in a new window or tab.
+  NEW_TAB = '_blank'
+  # Opens the linked document in the same frame as it was clicked.
+  CURRENT_TAB = '_self'
+
+
+def link(data, href, target=LinkTarget.CURRENT_TAB):
+  """Formats <a> tag data for processing in jinja template.
+
+  Args:
+    data: String link appears as on HTML page.
+    href: URL where link goes.
+    target: Where link should be opened (e.g. current tab or new tab).
+  """
+  return {
+    'data': data,
+    'href': href,
+    'target': target,
+  }
+
+
+def links_cell(links, html_class='center', rowspan=None):
+  """Formats table cell with links for processing in jinja template.
+
+  Args:
+    links: List of link dictionaries. Use |link| function to generate them.
+    html_class: Class for table cell.
+    rowspan: Rowspan HTML attribute.
+  """
+  return {
+    'cell_type': 'links',
+    'class': html_class,
+    'links': links,
+    'rowspan': rowspan,
+  }
+
+
+def action_cell(action, data, html_class):
+  """Formats table cell with javascript actions.
+
+  Args:
+    action: Javascript action.
+    data: Data in cell.
+    html_class: Class for table cell.
+  """
+  return {
+    'cell_type': 'action',
+    'action': action,
+    'data': data,
+    'class': html_class,
+  }
+
+
+def flakiness_dashboard_link(test_name, suite_name):
+  url_args = urllib.urlencode([
+      ('testType', suite_name),
+      ('tests', test_name)])
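+  # e.g. url_args == 'testType=gtest_suite&tests=org.FooTest%23testBar'
+  # (values here are illustrative; urlencode escapes '#' as %23).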
+  return ('https://test-results.appspot.com/'
+         'dashboards/flakiness_dashboard.html#%s' % url_args)
+
+
+def logs_cell(result, test_name, suite_name):
+  """Formats result logs data for processing in jinja template."""
+  link_list = []
+  result_link_dict = result.get('links', {})
+  result_link_dict['flakiness'] = flakiness_dashboard_link(
+      test_name, suite_name)
+  for name, href in sorted(result_link_dict.items()):
+    link_list.append(link(
+        data=name,
+        href=href,
+        target=LinkTarget.NEW_TAB))
+  if link_list:
+    return links_cell(link_list)
+  else:
+    return cell('(no logs)')
+
+
+def code_search(test, cs_base_url):
+  """Returns URL for test on codesearch."""
+  search = test.replace('#', '.')
+  return '%s/search/?q=%s&type=cs' % (cs_base_url, search)
+
+
+def status_class(status):
+  """Returns HTML class for test status."""
+  if not status:
+    return 'failure unknown'
+  status = status.lower()
+  if status not in ('success', 'skipped'):
+    return 'failure %s' % status
+  return status
+
+
+def create_test_table(results_dict, cs_base_url, suite_name):
+  """Format test data for injecting into HTML table."""
+
+  header_row = [
+    cell(data='test_name', html_class='text'),
+    cell(data='status', html_class='flaky'),
+    cell(data='elapsed_time_ms', html_class='number'),
+    cell(data='logs', html_class='text'),
+    cell(data='output_snippet', html_class='text'),
+  ]
+
+  test_row_blocks = []
+  for test_name, test_results in results_dict.iteritems():
+    test_runs = []
+    for index, result in enumerate(test_results):
+      if index == 0:
+        test_run = [links_cell(
+            links=[
+                link(href=code_search(test_name, cs_base_url),
+                     target=LinkTarget.NEW_TAB,
+                     data=test_name)],
+            rowspan=len(test_results),
+            html_class='left %s' % test_name
+        )]                                          # test_name
+      else:
+        test_run = []
+
+      test_run.extend([
+          cell(data=result['status'] or 'UNKNOWN',  # status
+               html_class=('center %s' %
+                           status_class(result['status']))),
+          cell(data=result['elapsed_time_ms']),     # elapsed_time_ms
+          logs_cell(result, test_name, suite_name), # logs
+          pre_cell(data=result['output_snippet'],   # output_snippet
+                   html_class='left'),
+      ])
+      test_runs.append(test_run)
+    test_row_blocks.append(test_runs)
+  return header_row, test_row_blocks
+
+
+def create_suite_table(results_dict):
+  """Format test suite data for injecting into HTML table."""
+
+  SUCCESS_COUNT_INDEX = 1
+  FAIL_COUNT_INDEX = 2
+  ALL_COUNT_INDEX = 3
+  TIME_INDEX = 4
+
+  header_row = [
+    cell(data='suite_name', html_class='text'),
+    cell(data='number_success_tests', html_class='number'),
+    cell(data='number_fail_tests', html_class='number'),
+    cell(data='all_tests', html_class='number'),
+    cell(data='elapsed_time_ms', html_class='number'),
+  ]
+
+  footer_row = [
+    action_cell(
+          'showTestsOfOneSuiteOnlyWithNewState("TOTAL")',
+          'TOTAL',
+          'center'
+        ),         # TOTAL
+    cell(data=0),  # number_success_tests
+    cell(data=0),  # number_fail_tests
+    cell(data=0),  # all_tests
+    cell(data=0),  # elapsed_time_ms
+  ]
+
+  suite_row_dict = {}
+  for test_name, test_results in results_dict.iteritems():
+    # TODO(mikecase): This logic doesn't work if there are multiple test runs,
+    # i.e. if 'per_iteration_data' has multiple entries, since we only look
+    # at the result of the last test run.
+    result = test_results[-1]
+
+    suite_name = (test_name.split('#')[0] if '#' in test_name
+                  else test_name.split('.')[0])
+    if suite_name in suite_row_dict:
+      suite_row = suite_row_dict[suite_name]
+    else:
+      suite_row = [
+        action_cell(
+          'showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name,
+          suite_name,
+          'left'
+        ),             # suite_name
+        cell(data=0),  # number_success_tests
+        cell(data=0),  # number_fail_tests
+        cell(data=0),  # all_tests
+        cell(data=0),  # elapsed_time_ms
+      ]
+
+    suite_row_dict[suite_name] = suite_row
+
+    suite_row[ALL_COUNT_INDEX]['data'] += 1
+    footer_row[ALL_COUNT_INDEX]['data'] += 1
+
+    if result['status'] == 'SUCCESS':
+      suite_row[SUCCESS_COUNT_INDEX]['data'] += 1
+      footer_row[SUCCESS_COUNT_INDEX]['data'] += 1
+    elif result['status'] != 'SKIPPED':
+      suite_row[FAIL_COUNT_INDEX]['data'] += 1
+      footer_row[FAIL_COUNT_INDEX]['data'] += 1
+
+    # Some types of crashes can have 'null' values for elapsed_time_ms.
+    if result['elapsed_time_ms'] is not None:
+      suite_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
+      footer_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
+
+  for suite in suite_row_dict.values():
+    if suite[FAIL_COUNT_INDEX]['data'] > 0:
+      suite[FAIL_COUNT_INDEX]['class'] += ' failure'
+    else:
+      suite[FAIL_COUNT_INDEX]['class'] += ' success'
+
+  if footer_row[FAIL_COUNT_INDEX]['data'] > 0:
+    footer_row[FAIL_COUNT_INDEX]['class'] += ' failure'
+  else:
+    footer_row[FAIL_COUNT_INDEX]['class'] += ' success'
+
+  return (header_row,
+          [[suite_row] for suite_row in suite_row_dict.values()],
+          footer_row)
+
+
+def feedback_url(result_details_link):
+  # pylint: disable=redefined-variable-type
+  url_args = [
+      ('labels', 'Pri-2,Type-Bug,Restrict-View-Google'),
+      ('summary', 'Result Details Feedback:'),
+      ('components', 'Test>Android'),
+  ]
+  if result_details_link:
+    url_args.append(('comment', 'Please check out: %s' % result_details_link))
+  url_args = urllib.urlencode(url_args)
+  # pylint: enable=redefined-variable-type
+  return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args
+
+
+def results_to_html(results_dict, cs_base_url, bucket, test_name,
+                    builder_name, build_number, local_output):
+  """Convert list of test results into html format.
+
+  Args:
+    local_output: Whether this results file is uploaded to Google Storage or
+        just a local file.
+  """
+  test_rows_header, test_rows = create_test_table(
+      results_dict, cs_base_url, test_name)
+  suite_rows_header, suite_rows, suite_row_footer = create_suite_table(
+      results_dict)
+
+  suite_table_values = {
+    'table_id': 'suite-table',
+    'table_headers': suite_rows_header,
+    'table_row_blocks': suite_rows,
+    'table_footer': suite_row_footer,
+  }
+
+  test_table_values = {
+    'table_id': 'test-table',
+    'table_headers': test_rows_header,
+    'table_row_blocks': test_rows,
+  }
+
+  main_template = JINJA_ENVIRONMENT.get_template(
+      os.path.join('template', 'main.html'))
+
+  if local_output:
+    html_render = main_template.render(  #  pylint: disable=no-member
+        {
+          'tb_values': [suite_table_values, test_table_values],
+          'feedback_url': feedback_url(None),
+        })
+    return (html_render, None, None)
+  else:
+    dest = google_storage_helper.unique_name(
+        '%s_%s_%s' % (test_name, builder_name, build_number))
+    result_details_link = google_storage_helper.get_url_link(
+        dest, '%s/html' % bucket)
+    html_render = main_template.render(  #  pylint: disable=no-member
+        {
+          'tb_values': [suite_table_values, test_table_values],
+          'feedback_url': feedback_url(result_details_link),
+        })
+    return (html_render, dest, result_details_link)
+
+
+def result_details(json_path, test_name, cs_base_url, bucket=None,
+                   builder_name=None, build_number=None, local_output=False):
+  """Get result details from json path and then convert results to html.
+
+  Args:
+    local_output: Whether this results file is uploaded to Google Storage or
+        just a local file.
+  """
+
+  with open(json_path) as json_file:
+    json_object = json.loads(json_file.read())
+
+  if 'per_iteration_data' not in json_object:
+    return 'Error: json file missing per_iteration_data.'
+
+  results_dict = collections.defaultdict(list)
+  for testsuite_run in json_object['per_iteration_data']:
+    for test, test_runs in testsuite_run.iteritems():
+      results_dict[test].extend(test_runs)
+  return results_to_html(results_dict, cs_base_url, bucket, test_name,
+                         builder_name, build_number, local_output)
+
+
+def upload_to_google_bucket(html, bucket, dest):
+  with tempfile.NamedTemporaryFile(suffix='.html') as temp_file:
+    temp_file.write(html)
+    temp_file.flush()
+    return google_storage_helper.upload(
+        name=dest,
+        filepath=temp_file.name,
+        bucket='%s/html' % bucket,
+        content_type='text/html',
+        authenticated_link=True)
+
+
+def ui_screenshot_set(json_path):
+  with open(json_path) as json_file:
+    json_object = json.loads(json_file.read())
+  if 'per_iteration_data' not in json_object:
+    # This will be reported as an error by result_details, no need to duplicate.
+    return None
+  ui_screenshots = []
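+  # Each 'ui screenshot' link is expected to resolve to a JSON list of
+  # screenshot entries; the lists from all test runs are concatenated below.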
+  # pylint: disable=too-many-nested-blocks
+  for testsuite_run in json_object['per_iteration_data']:
+    for _, test_runs in testsuite_run.iteritems():
+      for test_run in test_runs:
+        if 'ui screenshot' in test_run['links']:
+          screenshot_link = test_run['links']['ui screenshot']
+          if screenshot_link.startswith('file:'):
+            with contextlib.closing(urllib.urlopen(screenshot_link)) as f:
+              test_screenshots = json.load(f)
+          else:
+            # Assume anything that isn't a file link is a google storage link
+            screenshot_string = google_storage_helper.read_from_link(
+                screenshot_link)
+            if not screenshot_string:
+              logging.error('Bad screenshot link %s', screenshot_link)
+              continue
+            test_screenshots = json.loads(
+                screenshot_string)
+          ui_screenshots.extend(test_screenshots)
+  # pylint: enable=too-many-nested-blocks
+
+  if ui_screenshots:
+    return json.dumps(ui_screenshots)
+  return None
+
+
+def upload_screenshot_set(json_path, test_name, bucket, builder_name,
+                          build_number):
+  screenshot_set = ui_screenshot_set(json_path)
+  if not screenshot_set:
+    return None
+  dest = google_storage_helper.unique_name(
+    'screenshots_%s_%s_%s' % (test_name, builder_name, build_number),
+    suffix='.json')
+  with tempfile.NamedTemporaryFile(suffix='.json') as temp_file:
+    temp_file.write(screenshot_set)
+    temp_file.flush()
+    return google_storage_helper.upload(
+        name=dest,
+        filepath=temp_file.name,
+        bucket='%s/json' % bucket,
+        content_type='application/json',
+        authenticated_link=True)
+
+
+def main():
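+  # Typical local invocation (argument values illustrative):
+  #   test_results_presentation.py --json-file out.json --test-name foo \
+  #       --bucket my-results-bucket --builder-name android-rel \
+  #       --build-number 123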
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--json-file', help='Path of json file.')
+  parser.add_argument('--cs-base-url', help='Base url for code search.',
+                      default='http://cs.chromium.org')
+  parser.add_argument('--bucket', help='Google storage bucket.', required=True)
+  parser.add_argument('--builder-name', help='Builder name.')
+  parser.add_argument('--build-number', help='Build number.')
+  parser.add_argument('--test-name', help='The name of the test.',
+                      required=True)
+  parser.add_argument(
+      '-o', '--output-json',
+      help='(Swarming Merge Script API) '
+           'Output JSON file to create.')
+  parser.add_argument(
+      '--build-properties',
+      help='(Swarming Merge Script API) '
+           'Build property JSON file provided by recipes.')
+  parser.add_argument(
+      '--summary-json',
+      help='(Swarming Merge Script API) '
+           'Summary of shard state running on swarming. '
+           '(Output of the swarming.py collect '
+           '--task-summary-json=XXX command.)')
+  parser.add_argument(
+      '--task-output-dir',
+      help='(Swarming Merge Script API) '
+           'Directory containing all swarming task results.')
+  parser.add_argument(
+      'positional', nargs='*',
+      help='output.json from shards.')
+
+  args = parser.parse_args()
+
+  if ((args.build_properties is None) ==
+      (args.build_number is None or args.builder_name is None)):
+    parser.error('Exactly one of build_properties or '
+                 '(build_number and builder_name) should be given.')
+
+  if (args.build_number is None) != (args.builder_name is None):
+    parser.error('args.build_number and args.builder_name have to be '
+                 'given together or not given at all.')
+
+  if len(args.positional) == 0 and args.json_file is None:
+    if args.output_json:
+      with open(args.output_json, 'w') as f:
+        json.dump({}, f)
+    return
+  elif len(args.positional) != 0 and args.json_file:
+    parser.error('Exactly one of args.positional and '
+                 'args.json_file should be given.')
+
+  if args.build_properties:
+    build_properties = json.loads(args.build_properties)
+    if ('buildnumber' not in build_properties or
+        'buildername' not in build_properties):
+      raise parser.error('Build number/builder name not specified.')
+    build_number = build_properties['buildnumber']
+    builder_name = build_properties['buildername']
+  elif args.build_number and args.builder_name:
+    build_number = args.build_number
+    builder_name = args.builder_name
+
+  if args.positional:
+    if len(args.positional) == 1:
+      json_file = args.positional[0]
+    else:
+      if args.output_json and args.summary_json:
+        standard_gtest_merge.standard_gtest_merge(
+            args.output_json, args.summary_json, args.positional)
+        json_file = args.output_json
+      elif not args.output_json:
+        raise Exception('output_json required by merge API is missing.')
+      else:
+        raise Exception('summary_json required by merge API is missing.')
+  elif args.json_file:
+    json_file = args.json_file
+
+  if not os.path.exists(json_file):
+    raise IOError('--json-file %s not found.' % json_file)
+
+  # The link to the result details page is embedded in the page itself, so
+  # it has to be computed before the page is rendered and uploaded.
+  result_html_string, dest, result_details_link = result_details(
+      json_file, args.test_name, args.cs_base_url, args.bucket,
+      builder_name, build_number)
+
+  result_details_link_2 = upload_to_google_bucket(
+      result_html_string.encode('UTF-8'),
+      args.bucket, dest)
+  assert result_details_link == result_details_link_2, (
+      'Result details links do not match. The link returned by get_url_link'
+      ' should be the same as that returned by upload.')
+
+  ui_screenshot_set_link = upload_screenshot_set(json_file, args.test_name,
+      args.bucket, builder_name, build_number)
+
+  if ui_screenshot_set_link:
+    ui_catalog_url = 'https://chrome-ui-catalog.appspot.com/'
+    ui_catalog_query = urllib.urlencode(
+        {'screenshot_source': ui_screenshot_set_link})
+    ui_screenshot_link = '%s?%s' % (ui_catalog_url, ui_catalog_query)
+
+  if args.output_json:
+    with open(json_file) as original_json_file:
+      json_object = json.load(original_json_file)
+      json_object['links'] = {
+          'result_details (logcats, flakiness links)': result_details_link
+      }
+
+      if ui_screenshot_set_link:
+        json_object['links']['ui screenshots'] = ui_screenshot_link
+
+      with open(args.output_json, 'w') as f:
+        json.dump(json_object, f)
+  else:
+    print('Result Details: %s' % result_details_link)
+
+    if ui_screenshot_set_link:
+      print('UI Screenshots %s' % ui_screenshot_link)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/pylib/results/report_results.py b/src/build/android/pylib/results/report_results.py
new file mode 100644
index 0000000..56eefac
--- /dev/null
+++ b/src/build/android/pylib/results/report_results.py
@@ -0,0 +1,136 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utility functions for reporting results."""
+
+from __future__ import print_function
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.results.flakiness_dashboard import results_uploader
+from pylib.utils import logging_utils
+
+
+def _LogToFile(results, test_type, suite_name):
+  """Log results to local files which can be used for aggregation later."""
+  log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+  if not os.path.exists(log_file_path):
+    os.mkdir(log_file_path)
+  full_file_name = os.path.join(
+      log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
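+  # e.g. a test_type of 'Unit test' is logged to .../test_logs/unit_test.log.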
+  if not os.path.exists(full_file_name):
+    with open(full_file_name, 'w') as log_file:
+      print(
+          '\n%s results for %s build %s:' %
+          (test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+           os.environ.get('BUILDBOT_BUILDNUMBER')),
+          file=log_file)
+
+  logging.info('Writing results to %s.', full_file_name)
+  with open(full_file_name, 'a') as log_file:
+    shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+    print(
+        '%s%s' % (shortened_suite_name.ljust(30), results.GetShortForm()),
+        file=log_file)
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+                             flakiness_server):
+  """Upload results to the flakiness dashboard"""
+  logging.info('Upload results for test type "%s", test package "%s" to %s',
+               test_type, test_package, flakiness_server)
+
+  try:
+    # TODO(jbudorick): remove Instrumentation once instrumentation tests
+    # switch to platform mode.
+    if test_type in ('instrumentation', 'Instrumentation'):
+      if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+        assert test_package in ['ContentShellTest',
+                                'ChromePublicTest',
+                                'ChromeSyncShellTest',
+                                'SystemWebViewShellLayoutTest',
+                                'WebViewInstrumentationTest']
+        dashboard_test_type = ('%s_instrumentation_tests' %
+                               test_package.lower().rstrip('test'))
+      # Downstream server.
+      else:
+        dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+    elif test_type == 'gtest':
+      dashboard_test_type = test_package
+
+    else:
+      logging.warning('Invalid test type')
+      return
+
+    results_uploader.Upload(
+        results, flakiness_server, dashboard_test_type)
+
+  except Exception: # pylint: disable=broad-except
+    logging.exception('Failure while logging to %s', flakiness_server)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+            flakiness_server=None):
+  """Log the tests results for the test suite.
+
+  The results will be logged three different ways:
+    1. Log to stdout.
+    2. Log to local files for aggregating multiple test steps
+       (on buildbots only).
+    3. Log to flakiness dashboard (on buildbots only).
+
+  Args:
+    results: An instance of TestRunResults object.
+    test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+    test_package: Test package name (e.g. 'ipc_tests' for gtests,
+                  'ContentShellTest' for instrumentation tests)
+    annotation: If instrumentation test type, this is a list of annotations
+                (e.g. ['Feature', 'SmallTest']).
+    flakiness_server: If provided, upload the results to the flakiness
+                      dashboard at this URL.
+  """
+  # pylint doesn't like how colorama set up its color enums.
+  # pylint: disable=no-member
+  black_on_white = (logging_utils.BACK.WHITE, logging_utils.FORE.BLACK)
+  with logging_utils.OverrideColor(logging.CRITICAL, black_on_white):
+    if not results.DidRunPass():
+      logging.critical('*' * 80)
+      logging.critical('Detailed Logs')
+      logging.critical('*' * 80)
+      for line in results.GetLogs().splitlines():
+        logging.critical(line)
+    logging.critical('*' * 80)
+    logging.critical('Summary')
+    logging.critical('*' * 80)
+    for line in results.GetGtestForm().splitlines():
+      color = black_on_white
+      if 'FAILED' in line:
+        # Red on white, dim.
+        color = (logging_utils.BACK.WHITE, logging_utils.FORE.RED,
+                 logging_utils.STYLE.DIM)
+      elif 'PASSED' in line:
+        # Green on white, dim.
+        color = (logging_utils.BACK.WHITE, logging_utils.FORE.GREEN,
+                 logging_utils.STYLE.DIM)
+      with logging_utils.OverrideColor(logging.CRITICAL, color):
+        logging.critical(line)
+    logging.critical('*' * 80)
+
+  if os.environ.get('BUILDBOT_BUILDERNAME'):
+    # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
+    if annotation and len(annotation) == 1:
+      suite_name = annotation[0]
+    else:
+      suite_name = test_package
+    _LogToFile(results, test_type, suite_name)
+
+    if flakiness_server:
+      _LogToFlakinessDashboard(results, test_type, test_package,
+                               flakiness_server)
diff --git a/src/build/android/pylib/symbols/__init__.py b/src/build/android/pylib/symbols/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/symbols/__init__.py
diff --git a/src/build/android/pylib/symbols/apk_lib_dump.py b/src/build/android/pylib/symbols/apk_lib_dump.py
new file mode 100755
index 0000000..ba87026
--- /dev/null
+++ b/src/build/android/pylib/symbols/apk_lib_dump.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dump shared library information from an APK file.
+
+This script is used to dump which *uncompressed* native shared libraries an
+APK contains, as well as their position within the file. This is mostly useful
+to diagnose logcat and tombstone symbolization issues when the libraries are
+loaded directly from the APK at runtime.
+
+The default format will print one line per uncompressed shared library with the
+following format:
+
+  0x<start-offset> 0x<end-offset> 0x<file-size> <file-path>
+
+The --format=python option dumps the same information in a form that is easy
+to consume from a Python script, e.g. with a line like:
+
+  (0x<start-offset>, 0x<end-offset>, 0x<file-size>, <file-path>),
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+
+from pylib.symbols import apk_native_libs
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__,
+      formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  parser.add_argument('apk', help='Input APK file path.')
+
+  parser.add_argument('--format', help='Select output format',
+                      default='default', choices=['default', 'python'])
+
+  args = parser.parse_args()
+
+  apk_reader = apk_native_libs.ApkReader(args.apk)
+  lib_map = apk_native_libs.ApkNativeLibraries(apk_reader)
+  for lib_path, file_offset, file_size in lib_map.GetDumpList():
+    if args.format == 'python':
+      print('(0x%08x, 0x%08x, 0x%08x, \'%s\'),' %
+            (file_offset, file_offset + file_size, file_size, lib_path))
+    else:
+      print('0x%08x 0x%08x 0x%08x %s' % (file_offset, file_offset + file_size,
+                                         file_size, lib_path))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/pylib/symbols/apk_native_libs.py b/src/build/android/pylib/symbols/apk_native_libs.py
new file mode 100644
index 0000000..c4af202
--- /dev/null
+++ b/src/build/android/pylib/symbols/apk_native_libs.py
@@ -0,0 +1,419 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import struct
+import zipfile
+
+# The default zipfile python module cannot open APKs properly, but this
+# fixes it. Note that simply importing this file is sufficient to
+# ensure that zip works correctly for all other modules. See:
+# http://bugs.python.org/issue14315
+# https://hg.python.org/cpython/rev/6dd5e9556a60#l2.8
+def _PatchZipFile():
+  # pylint: disable=protected-access
+  oldDecodeExtra = zipfile.ZipInfo._decodeExtra
+  def decodeExtra(self):
+    try:
+      oldDecodeExtra(self)
+    except struct.error:
+      pass
+  zipfile.ZipInfo._decodeExtra = decodeExtra
+_PatchZipFile()
+
+
+class ApkZipInfo(object):
+  """Models a single file entry from an ApkReader.
+
+  This is very similar to the zipfile.ZipInfo class. It provides a few
+  properties describing the entry:
+    - filename          (same as ZipInfo.filename)
+    - file_size         (same as ZipInfo.file_size)
+    - compress_size     (same as ZipInfo.compress_size)
+    - file_offset       (note: not provided by ZipInfo)
+
+  And a few useful methods: IsCompressed() and IsElfFile().
+
+  Entries can be created by using ApkReader() methods.
+  """
+  def __init__(self, zip_file, zip_info):
+    """Construct instance. Do not call this directly. Use ApkReader methods."""
+    self._file = zip_file
+    self._info = zip_info
+    self._file_offset = None
+
+  @property
+  def filename(self):
+    """Entry's file path within APK."""
+    return self._info.filename
+
+  @property
+  def file_size(self):
+    """Entry's extracted file size in bytes."""
+    return self._info.file_size
+
+  @property
+  def compress_size(self):
+    """Entry' s compressed file size in bytes."""
+    return self._info.compress_size
+
+  @property
+  def file_offset(self):
+    """Entry's starting file offset in the APK."""
+    if self._file_offset is None:
+      self._file_offset = self._ZipFileOffsetFromLocalHeader(
+          self._file.fp, self._info.header_offset)
+    return self._file_offset
+
+  def __repr__(self):
+    """Convert to string for debugging."""
+    return 'ApkZipInfo["%s",size=0x%x,compressed=0x%x,offset=0x%x]' % (
+        self.filename, self.file_size, self.compress_size, self.file_offset)
+
+  def IsCompressed(self):
+    """Returns True iff the entry is compressed."""
+    return self._info.compress_type != zipfile.ZIP_STORED
+
+  def IsElfFile(self):
+    """Returns True iff the entry is an ELF file."""
+    with self._file.open(self._info, 'r') as f:
+      return f.read(4) == '\x7fELF'
+
+  @staticmethod
+  def _ZipFileOffsetFromLocalHeader(fd, local_header_offset):
+    """Return a file's start offset from its zip archive local header.
+
+    Args:
+      fd: Input file object.
+      local_header_offset: Local header offset (from its ZipInfo entry).
+    Returns:
+      file start offset.
+    """
+    FILE_NAME_LEN_OFFSET = 26
+    FILE_NAME_OFFSET = 30
+    fd.seek(local_header_offset + FILE_NAME_LEN_OFFSET)
+    file_name_len = struct.unpack('H', fd.read(2))[0]
+    extra_field_len = struct.unpack('H', fd.read(2))[0]
+    file_offset = (local_header_offset + FILE_NAME_OFFSET +
+                   file_name_len + extra_field_len)
+    return file_offset
+
+
+class ApkReader(object):
+  """A convenience class used to read the content of APK files.
+
+  Its design is very similar to the one from zipfile.ZipFile, except
+  that it returns ApkZipInfo entries which provide a |file_offset|
+  property that can be used to know where a given file is located inside
+  the archive.
+
+  It is also easy to mock for unit-testing (see MockApkReader in
+  apk_native_libs_unittest.py) without creating any files on disk.
+
+  Usage is the following:
+    - Create an instance using a with statement (for proper unit-testing).
+    - Call ListEntries() to list all entries in the archive. This returns
+      a list of ApkZipInfo entries.
+    - Or call FindEntry() corresponding to a given path within the archive.
+
+  For example:
+     with ApkReader(input_apk_path) as reader:
+       info = reader.FindEntry('lib/armeabi-v7a/libfoo.so')
+       if info.IsCompressed() or not info.IsElfFile():
+         raise Exception('Invalid library path')
+
+  The ApkZipInfo can be used to inspect the entry's metadata. See its
+  documentation for all details.
+  """
+  def __init__(self, apk_path):
+    """Initialize instance."""
+    self._zip_file = zipfile.ZipFile(apk_path, 'r')
+    self._path = apk_path
+
+  def __enter__(self):
+    """Python context manager entry."""
+    return self
+
+  def __exit__(self, *args):
+    """Python context manager exit."""
+    self.Close()
+
+  @property
+  def path(self):
+    """The corresponding input APK path."""
+    return self._path
+
+  def Close(self):
+    """Close the reader (and underlying ZipFile instance)."""
+    self._zip_file.close()
+
+  def ListEntries(self):
+    """Return a list of ApkZipInfo entries for this APK."""
+    result = []
+    for info in self._zip_file.infolist():
+      result.append(ApkZipInfo(self._zip_file, info))
+    return result
+
+  def FindEntry(self, file_path):
+    """Return an ApkZipInfo instance for a given archive file path.
+
+    Args:
+      file_path: zip file path.
+    Returns:
+      A new ApkZipInfo entry on success.
+    Raises:
+      KeyError on failure (entry not found).
+    """
+    info = self._zip_file.getinfo(file_path)
+    return ApkZipInfo(self._zip_file, info)
+
+
+class ApkNativeLibraries(object):
+  """A class for the list of uncompressed shared libraries inside an APK.
+
+  Create a new instance by passing an ApkReader for the input APK, then use
+  the FindLibraryByOffset() method to find the native shared library path
+  corresponding to a given file offset.
+
+  IsEmpty(), GetLibraries() and GetDumpList() can also be used to inspect
+  the state of the instance.
+  """
+  def __init__(self, apk_reader):
+    """Initialize instance.
+
+    Args:
+      apk_reader: An ApkReader instance corresponding to the input APK.
+    """
+    self._native_libs = []
+    for entry in apk_reader.ListEntries():
+      # Chromium uses so-called 'placeholder' native shared libraries
+      # that have a size of 0, and are only used to deal with bugs in
+      # older Android system releases (they are never loaded and cannot
+      # appear in stack traces). Ignore these here to avoid generating
+      # confusing results.
+      if entry.file_size == 0:
+        continue
+
+      # Only uncompressed libraries can appear in stack traces.
+      if entry.IsCompressed():
+        continue
+
+      # Only consider files within lib/ and with a filename ending with .so
+      # at the moment. NOTE: Do not require a 'lib' prefix, since that would
+      # prevent finding the 'crazy.libXXX.so' libraries used by Chromium.
+      if (not entry.filename.startswith('lib/') or
+          not entry.filename.endswith('.so')):
+        continue
+
+      lib_path = entry.filename
+
+      self._native_libs.append(
+          (lib_path, entry.file_offset, entry.file_offset + entry.file_size))
+
+  def IsEmpty(self):
+    """Return true iff the list is empty."""
+    return not bool(self._native_libs)
+
+  def GetLibraries(self):
+    """Return the list of all library paths in this instance."""
+    return sorted([x[0] for x in self._native_libs])
+
+  def GetDumpList(self):
+    """Retrieve full library map.
+
+    Returns:
+      A list of (lib_path, file_offset, file_size) tuples, sorted
+      in increasing |file_offset| values.
+    """
+    result = []
+    for entry in self._native_libs:
+      lib_path, file_start, file_end = entry
+      result.append((lib_path, file_start, file_end - file_start))
+
+    return sorted(result, key=lambda x: x[1])
+
+  def FindLibraryByOffset(self, file_offset):
+    """Find the native library at a given file offset.
+
+    Args:
+      file_offset: File offset within the original APK.
+    Returns:
+      A (lib_path, lib_offset) tuple on success, or (None, 0) on failure.
+      Note that lib_path is the full path within the APK (it keeps the
+      'lib/$ABI/' prefix), and lib_offset is file_offset rebased to the
+      start of the library.
+    """
+    for lib_path, start_offset, end_offset in self._native_libs:
+      if file_offset >= start_offset and file_offset < end_offset:
+        return (lib_path, file_offset - start_offset)
+
+    return (None, 0)
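+
+  # Usage sketch (hypothetical path and offset):
+  #
+  #   with ApkReader('/tmp/app.apk') as reader:
+  #     libs_map = ApkNativeLibraries(reader)
+  #   lib_path, lib_offset = libs_map.FindLibraryByOffset(0x123be00)
+  #   # lib_path is None when the offset is not inside an uncompressed
+  #   # native library.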
+
+
+class ApkLibraryPathTranslator(object):
+  """Translates APK file paths + byte offsets into library path + offset.
+
+  The purpose of this class is to translate a native shared library path
+  that points to an APK into a new device-specific path that points to a
+  native shared library, as if it was installed there. E.g.:
+
+     ('/data/data/com.example.app-1/base.apk', 0x123be00)
+
+  would be translated into:
+
+     ('/data/data/com.example.app-1/base.apk!lib/libfoo.so', 0x3be00)
+
+  assuming the original APK (installed as base.apk) contains an uncompressed
+  shared library under lib/armeabi-v7a/libfoo.so at offset 0x120000.
+
+  Note that the virtual device path after the ! doesn't necessarily match
+  the path inside the .apk. This doesn't really matter for the rest of
+  the symbolization functions since only the file's base name can be used
+  to find the corresponding file on the host.
+
+  Usage is the following:
+
+     1/ Create new instance.
+
+     2/ Call AddHostApk() one or several times to register an APK's
+        package name, ApkNativeLibraries instance, and optional
+        device-installed name.
+
+     3/ Call TranslatePath() to translate a (path, offset) tuple corresponding
+        to an on-device APK, into the corresponding virtual device library
+        path and offset.
+  """
+
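+  # Usage sketch (hypothetical names; |native_libs| is an ApkNativeLibraries
+  # instance built from the matching host-side APK):
+  #
+  #   translator = ApkLibraryPathTranslator()
+  #   translator.AddHostApk('com.example.app', native_libs)
+  #   path, offset = translator.TranslatePath(
+  #       '/data/data/com.example.app-1/base.apk', 0x123be00)
+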
+  # Depending on the version of the system, a non-system APK might be installed
+  # on a path that looks like the following:
+  #
+  #  * /data/..../<package_name>-<number>.apk, where <number> is used to
+  #    distinguish several versions of the APK during package updates.
+  #
+  #  * /data/..../<package_name>-<suffix>/base.apk, where <suffix> is a
+  #    string of random ASCII characters following the dash after the
+  #    package name. This serves as a way to distinguish the installation
+  #    paths during package update, and randomize its final location
+  #    (to prevent apps from hard-coding the paths to other apps).
+  #
+  #    Note that the 'base.apk' name comes from the system.
+  #
+  #  * /data/.../<package_name>-<suffix>/<split_name>.apk, where <suffix>
+  #    is the same as above, and <split_name> is the name of an app bundle
+  #    split APK.
+  #
+  # System APKs are installed on paths that look like /system/app/Foo.apk
+  # but this class ignores them intentionally.
+
+  # Compiled regular expression for the first format above.
+  _RE_APK_PATH_1 = re.compile(
+      r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<version>[0-9]+)\.apk')
+
+  # Compiled regular expression for the second and third formats above.
+  _RE_APK_PATH_2 = re.compile(
+      r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<suffix>[^/]+)/' +
+      r'(?P<apk_name>.+\.apk)')
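+
+  # For example (hypothetical paths):
+  #   _RE_APK_PATH_1 matches '/data/app/com.example.app-2.apk'
+  #   _RE_APK_PATH_2 matches '/data/app/com.example.app-xYz12AbC/base.apk'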
+
+  def __init__(self):
+    """Initialize instance. Call AddHostApk() to add host apk file paths."""
+    self._path_map = {}  # Maps (package_name, apk_name) to host-side APK path.
+    # Maps '<package_name>/<apk_name>' keys to ApkNativeLibraries instances.
+    self._libs_map = {}
+
+  def AddHostApk(self, package_name, native_libs, device_apk_name=None):
+    """Add a file path to the host APK search list.
+
+    Args:
+      package_name: Corresponding apk package name.
+      native_libs: ApkNativeLibraries instance for the corresponding APK.
+      device_apk_name: Optional expected name of the installed APK on the
+        device. This is only useful when symbolizing app bundles that run on
+        Android L+; it is ignored in other cases.
+    """
+    if native_libs.IsEmpty():
+      logging.debug('Ignoring host APK without any uncompressed native '
+                    'libraries: %s', device_apk_name)
+      return
+
+    # If the APK name is not provided, use the default of 'base.apk'. This
+    # will be ignored if we find <package_name>-<number>.apk file paths
+    # in the input, but will work properly for Android L+, as long as we're
+    # not using Android app bundles.
+    device_apk_name = device_apk_name or 'base.apk'
+
+    key = "%s/%s" % (package_name, device_apk_name)
+    if key in self._libs_map:
+      raise KeyError('There is already an APK associated with (%s)' % key)
+
+    self._libs_map[key] = native_libs
+
+  @staticmethod
+  def _MatchApkDeviceInstallPath(apk_path):
+    """Check whether a given path matches an installed APK device file path.
+
+    Args:
+      apk_path: Device-specific file path.
+    Returns:
+      On success, a (package_name, apk_name) tuple. On failure, (None, None).
+    """
+    m = ApkLibraryPathTranslator._RE_APK_PATH_1.match(apk_path)
+    if m:
+      return (m.group('package_name'), 'base.apk')
+
+    m = ApkLibraryPathTranslator._RE_APK_PATH_2.match(apk_path)
+    if m:
+      return (m.group('package_name'), m.group('apk_name'))
+
+    return (None, None)
+
+  def TranslatePath(self, apk_path, apk_offset):
+    """Translate a potential apk file path + offset into library path + offset.
+
+    Args:
+      apk_path: Library or apk file path on the device (e.g.
+        '/data/data/com.example.app-XSAHKSJH/base.apk').
+      apk_offset: Byte offset within the library or apk.
+
+    Returns:
+      a new (lib_path, lib_offset) tuple. If |apk_path| points to an APK,
+      then this function searches inside the corresponding host-side APKs
+      (added with AddHostApk() above) for the corresponding uncompressed
+      native shared library at |apk_offset|. If found, this returns a new
+      device-specific path corresponding to a virtual installation of said
+      library with an adjusted offset.
+
+      Otherwise, just return the original (apk_path, apk_offset) values.
+    """
+    if not apk_path.endswith('.apk'):
+      return (apk_path, apk_offset)
+
+    apk_package, apk_name = self._MatchApkDeviceInstallPath(apk_path)
+    if not apk_package:
+      return (apk_path, apk_offset)
+
+    key = '%s/%s' % (apk_package, apk_name)
+    native_libs = self._libs_map.get(key)
+    if not native_libs:
+      logging.debug('Unknown %s package', key)
+      return (apk_path, apk_offset)
+
+    lib_name, new_offset = native_libs.FindLibraryByOffset(apk_offset)
+    if not lib_name:
+      logging.debug('Invalid offset in %s package: %d', key, apk_offset)
+      return (apk_path, apk_offset)
+
+    lib_name = os.path.basename(lib_name)
+
+    # Some libraries are stored with a 'crazy.' prefix inside the APK. This
+    # is done to prevent the PackageManager from extracting the libraries
+    # at installation time when running on pre-Android M systems, where the
+    # system linker cannot load libraries directly from APKs.
+    crazy_prefix = 'crazy.'
+    if lib_name.startswith(crazy_prefix):
+      lib_name = lib_name[len(crazy_prefix):]
+
+    # Put this in a fictional lib sub-directory for good measure.
+    new_path = '%s!lib/%s' % (apk_path, lib_name)
+
+    return (new_path, new_offset)
diff --git a/src/build/android/pylib/symbols/apk_native_libs_unittest.py b/src/build/android/pylib/symbols/apk_native_libs_unittest.py
new file mode 100644
index 0000000..416918d
--- /dev/null
+++ b/src/build/android/pylib/symbols/apk_native_libs_unittest.py
@@ -0,0 +1,396 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import unittest
+
+from pylib.symbols import apk_native_libs
+
+# Mock ELF-like data
+MOCK_ELF_DATA = '\x7fELFFFFFFFFFFFFFFFF'
+
+class MockApkZipInfo(object):
+  """A mock ApkZipInfo class, returned by MockApkReaderFactory instances."""
+  def __init__(self, filename, file_size, compress_size, file_offset,
+               file_data):
+    self.filename = filename
+    self.file_size = file_size
+    self.compress_size = compress_size
+    self.file_offset = file_offset
+    self._data = file_data
+
+  def __repr__(self):
+    """Convert to string for debugging."""
+    return 'MockApkZipInfo["%s",size=%d,compressed=%d,offset=%d]' % (
+        self.filename, self.file_size, self.compress_size, self.file_offset)
+
+  def IsCompressed(self):
+    """Returns True iff the entry is compressed."""
+    return self.file_size != self.compress_size
+
+  def IsElfFile(self):
+    """Returns True iff the entry is an ELF file."""
+    if not self._data or len(self._data) < 4:
+      return False
+
+    return self._data[0:4] == '\x7fELF'
+
+
+class MockApkReader(object):
+  """A mock ApkReader instance used during unit-testing.
+
+  Use it as a context manager, as in:
+
+     with MockApkReader() as reader:
+       reader.AddTestEntry(file_path, file_size, compress_size, file_data)
+       ...
+       # Use the mock wherever an ApkReader instance is expected, e.g.:
+       libs_map = apk_native_libs.ApkNativeLibraries(reader)
+  """
+  def __init__(self, apk_path='test.apk'):
+    """Initialize instance."""
+    self._entries = []
+    self._fake_offset = 0
+    self._path = apk_path
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, *args):
+    self.Close()
+    return
+
+  @property
+  def path(self):
+    return self._path
+
+  def AddTestEntry(self, filepath, file_size, compress_size, file_data):
+    """Add a new entry to the instance for unit-tests.
+
+
+    Args:
+      filepath: archive file path.
+      file_size: uncompressed file size in bytes.
+      compress_size: compressed size in bytes.
+      file_data: file data to be checked by IsElfFile()
+
+    Note that file_data can be None, or its size can actually be smaller
+    than |compress_size| when used during unit-testing.
+    """
+    self._entries.append(MockApkZipInfo(filepath, file_size, compress_size,
+                         self._fake_offset, file_data))
+    self._fake_offset += compress_size
+
+  def Close(self):  # pylint: disable=no-self-use
+    """Close this reader instance."""
+    return
+
+  def ListEntries(self):
+    """Return a list of MockApkZipInfo instances for this input APK."""
+    return self._entries
+
+  def FindEntry(self, file_path):
+    """Find the MockApkZipInfo instance corresponds to a given file path."""
+    for entry in self._entries:
+      if entry.filename == file_path:
+        return entry
+    raise KeyError('Could not find mock zip archive member for: ' + file_path)
+
+
+class MockApkReaderTest(unittest.TestCase):
+
+  def testEmpty(self):
+    with MockApkReader() as reader:
+      entries = reader.ListEntries()
+      self.assertEqual(len(entries), 0)
+      with self.assertRaises(KeyError):
+        reader.FindEntry('non-existent-entry.txt')
+
+  def testSingleEntry(self):
+    with MockApkReader() as reader:
+      reader.AddTestEntry('some-path/some-file', 20000, 12345, file_data=None)
+      entries = reader.ListEntries()
+      self.assertEqual(len(entries), 1)
+      entry = entries[0]
+      self.assertEqual(entry.filename, 'some-path/some-file')
+      self.assertEqual(entry.file_size, 20000)
+      self.assertEqual(entry.compress_size, 12345)
+      self.assertTrue(entry.IsCompressed())
+
+      entry2 = reader.FindEntry('some-path/some-file')
+      self.assertEqual(entry, entry2)
+
+  def testMultipleEntries(self):
+    with MockApkReader() as reader:
+      _ENTRIES = {
+        'foo.txt': (1024, 1024, 'FooFooFoo'),
+        'lib/bar/libcode.so': (16000, 3240, '\x7fELFFFFFFFFFFFF'),
+      }
+      for path, props in _ENTRIES.iteritems():
+        reader.AddTestEntry(path, props[0], props[1], props[2])
+
+      entries = reader.ListEntries()
+      self.assertEqual(len(entries), len(_ENTRIES))
+      for path, props in _ENTRIES.iteritems():
+        entry = reader.FindEntry(path)
+        self.assertEqual(entry.filename, path)
+        self.assertEqual(entry.file_size, props[0])
+        self.assertEqual(entry.compress_size, props[1])
+
+
+class ApkNativeLibrariesTest(unittest.TestCase):
+
+  def setUp(self):
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def testEmptyApk(self):
+    with MockApkReader() as reader:
+      libs_map = apk_native_libs.ApkNativeLibraries(reader)
+      self.assertTrue(libs_map.IsEmpty())
+      self.assertEqual(len(libs_map.GetLibraries()), 0)
+      lib_path, lib_offset = libs_map.FindLibraryByOffset(0)
+      self.assertIsNone(lib_path)
+      self.assertEqual(lib_offset, 0)
+
+  def testSimpleApk(self):
+    with MockApkReader() as reader:
+      _MOCK_ENTRIES = [
+        # Top-level library should be ignored.
+        ('libfoo.so', 1000, 1000, MOCK_ELF_DATA, False),
+        # Library not under lib/ should be ignored.
+        ('badlib/test-abi/libfoo2.so', 1001, 1001, MOCK_ELF_DATA, False),
+        # Library under lib/<abi>/ but without .so extension should be ignored.
+        ('lib/test-abi/libfoo4.so.1', 1003, 1003, MOCK_ELF_DATA, False),
+        # Library under lib/<abi>/ with .so suffix, but compressed -> ignored.
+        ('lib/test-abi/libfoo5.so', 1004, 1003, MOCK_ELF_DATA, False),
+        # First correct library
+        ('lib/test-abi/libgood1.so', 1005, 1005, MOCK_ELF_DATA, True),
+        # Second correct library: support sub-directories
+        ('lib/test-abi/subdir/libgood2.so', 1006, 1006, MOCK_ELF_DATA, True),
+        # Third correct library, no lib prefix required
+        ('lib/test-abi/crazy.libgood3.so', 1007, 1007, MOCK_ELF_DATA, True),
+      ]
+      file_offsets = []
+      prev_offset = 0
+      for ent in _MOCK_ENTRIES:
+        reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3])
+        file_offsets.append(prev_offset)
+        prev_offset += ent[2]
+
+      libs_map = apk_native_libs.ApkNativeLibraries(reader)
+      self.assertFalse(libs_map.IsEmpty())
+      self.assertEqual(libs_map.GetLibraries(), [
+          'lib/test-abi/crazy.libgood3.so',
+          'lib/test-abi/libgood1.so',
+          'lib/test-abi/subdir/libgood2.so',
+          ])
+
+      BIAS = 10
+      for mock_ent, file_offset in zip(_MOCK_ENTRIES, file_offsets):
+        if mock_ent[4]:
+          lib_path, lib_offset = libs_map.FindLibraryByOffset(
+              file_offset + BIAS)
+          self.assertEqual(lib_path, mock_ent[0])
+          self.assertEqual(lib_offset, BIAS)
+
+  def testMultiAbiApk(self):
+    with MockApkReader() as reader:
+      _MOCK_ENTRIES = [
+        ('lib/abi1/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
+        ('lib/abi2/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
+      ]
+      for ent in _MOCK_ENTRIES:
+        reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3])
+
+      libs_map = apk_native_libs.ApkNativeLibraries(reader)
+      self.assertFalse(libs_map.IsEmpty())
+      self.assertEqual(libs_map.GetLibraries(), [
+          'lib/abi1/libfoo.so', 'lib/abi2/libfoo.so'])
+
+      lib1_name, lib1_offset = libs_map.FindLibraryByOffset(10)
+      self.assertEqual(lib1_name, 'lib/abi1/libfoo.so')
+      self.assertEqual(lib1_offset, 10)
+
+      lib2_name, lib2_offset = libs_map.FindLibraryByOffset(1000)
+      self.assertEqual(lib2_name, 'lib/abi2/libfoo.so')
+      self.assertEqual(lib2_offset, 0)
+
+
+class MockApkNativeLibraries(apk_native_libs.ApkNativeLibraries):
+  """A mock ApkNativeLibraries instance that can be used as input to
+     ApkLibraryPathTranslator without creating an ApkReader instance.
+
+     Create a new instance, then call AddTestEntry or AddTestEntries
+     as many times as necessary, before using it as a regular
+     ApkNativeLibraries instance.
+  """
+  # pylint: disable=super-init-not-called
+  def __init__(self):
+    self._native_libs = []
+
+  # pylint: enable=super-init-not-called
+
+  def AddTestEntry(self, lib_path, file_offset, file_size):
+    """Add a new test entry.
+
+    Args:
+      lib_path: Library path within the APK
+          (e.g. 'lib/armeabi-v7a/libfoo.so').
+      file_offset: File offset of the library within the APK, in bytes.
+      file_size: Uncompressed library size in bytes.
+    """
+    self._native_libs.append((lib_path, file_offset, file_offset + file_size))
+
+  def AddTestEntries(self, entries):
+    """Add a list of new test entries.
+
+    Args:
+      entries: A list of (library-path, file-offset, file-size) values.
+    """
+    for entry in entries:
+      self.AddTestEntry(entry[0], entry[1], entry[2])
+
+
+class MockApkNativeLibrariesTest(unittest.TestCase):
+
+  def testEmptyInstance(self):
+    mock = MockApkNativeLibraries()
+    self.assertTrue(mock.IsEmpty())
+    self.assertEqual(mock.GetLibraries(), [])
+    self.assertEqual(mock.GetDumpList(), [])
+
+  def testAddTestEntry(self):
+    mock = MockApkNativeLibraries()
+    mock.AddTestEntry('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000)
+    mock.AddTestEntry('lib/x86/libzoo.so', 0x10000, 0x10000)
+    mock.AddTestEntry('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000)
+    self.assertFalse(mock.IsEmpty())
+    self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so',
+                                           'lib/armeabi-v7a/libfoo.so',
+                                           'lib/x86/libzoo.so'])
+    self.assertEqual(mock.GetDumpList(), [
+        ('lib/x86/libzoo.so', 0x10000, 0x10000),
+        ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+        ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+    ])
+
+  def testAddTestEntries(self):
+    mock = MockApkNativeLibraries()
+    mock.AddTestEntries([
+      ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+      ('lib/x86/libzoo.so', 0x10000, 0x10000),
+      ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+    ])
+    self.assertFalse(mock.IsEmpty())
+    self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so',
+                                           'lib/armeabi-v7a/libfoo.so',
+                                           'lib/x86/libzoo.so'])
+    self.assertEqual(mock.GetDumpList(), [
+        ('lib/x86/libzoo.so', 0x10000, 0x10000),
+        ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+        ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+    ])
+
+
+class ApkLibraryPathTranslatorTest(unittest.TestCase):
+
+  def _CheckUntranslated(self, translator, path, offset):
+    """Check that a given (path, offset) is not modified by translation."""
+    self.assertEqual(translator.TranslatePath(path, offset), (path, offset))
+
+  def _CheckTranslated(self, translator, path, offset, new_path, new_offset):
+    """Check that (path, offset) is translated into (new_path, new_offset)."""
+    self.assertEqual(translator.TranslatePath(path, offset),
+                     (new_path, new_offset))
+
+  def testEmptyInstance(self):
+    translator = apk_native_libs.ApkLibraryPathTranslator()
+    self._CheckUntranslated(
+        translator, '/data/data/com.example.app-1/base.apk', 0x123456)
+
+  def testSimpleApk(self):
+    mock_libs = MockApkNativeLibraries()
+    mock_libs.AddTestEntries([
+      ('lib/test-abi/libfoo.so', 200, 2000),
+      ('lib/test-abi/libbar.so', 3200, 3000),
+      ('lib/test-abi/crazy.libzoo.so', 6200, 2000),
+    ])
+    translator = apk_native_libs.ApkLibraryPathTranslator()
+    translator.AddHostApk('com.example.app', mock_libs)
+
+    # Offset is within the first uncompressed library
+    self._CheckTranslated(
+        translator,
+        '/data/data/com.example.app-9.apk', 757,
+        '/data/data/com.example.app-9.apk!lib/libfoo.so', 557)
+
+    # Offset is within the second compressed library.
+    self._CheckUntranslated(
+        translator,
+        '/data/data/com.example.app-9/base.apk', 2800)
+
+    # Offset is within the third uncompressed library.
+    self._CheckTranslated(
+        translator,
+        '/data/data/com.example.app-1/base.apk', 3628,
+        '/data/data/com.example.app-1/base.apk!lib/libbar.so', 428)
+
+    # Offset is within the fourth uncompressed library with crazy. prefix
+    self._CheckTranslated(
+        translator,
+        '/data/data/com.example.app-XX/base.apk', 6500,
+        '/data/data/com.example.app-XX/base.apk!lib/libzoo.so', 300)
+
+    # Out-of-bounds apk offset.
+    self._CheckUntranslated(
+        translator,
+        '/data/data/com.example.app-1/base.apk', 10000)
+
+    # Invalid package name.
+    self._CheckUntranslated(
+        translator, '/data/data/com.example2.app-1/base.apk', 757)
+
+    # Invalid apk name.
+    self._CheckUntranslated(
+          translator, '/data/data/com.example.app-2/not-base.apk', 100)
+
+    # Invalid file extensions.
+    self._CheckUntranslated(
+          translator, '/data/data/com.example.app-2/base', 100)
+
+    self._CheckUntranslated(
+          translator, '/data/data/com.example.app-2/base.apk.dex', 100)
+
+  def testBundleApks(self):
+    mock_libs1 = MockApkNativeLibraries()
+    mock_libs1.AddTestEntries([
+      ('lib/test-abi/libfoo.so', 200, 2000),
+      ('lib/test-abi/libbbar.so', 3200, 3000),
+    ])
+    mock_libs2 = MockApkNativeLibraries()
+    mock_libs2.AddTestEntries([
+      ('lib/test-abi/libzoo.so', 200, 2000),
+      ('lib/test-abi/libtool.so', 3000, 4000),
+    ])
+    translator = apk_native_libs.ApkLibraryPathTranslator()
+    translator.AddHostApk('com.example.app', mock_libs1, 'base-master.apk')
+    translator.AddHostApk('com.example.app', mock_libs2, 'feature-master.apk')
+
+    self._CheckTranslated(
+      translator,
+      '/data/app/com.example.app-XUIYIUW/base-master.apk', 757,
+      '/data/app/com.example.app-XUIYIUW/base-master.apk!lib/libfoo.so', 557)
+
+    self._CheckTranslated(
+      translator,
+      '/data/app/com.example.app-XUIYIUW/feature-master.apk', 3200,
+      '/data/app/com.example.app-XUIYIUW/feature-master.apk!lib/libtool.so',
+      200)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/symbols/deobfuscator.py b/src/build/android/pylib/symbols/deobfuscator.py
new file mode 100644
index 0000000..ffc23b8
--- /dev/null
+++ b/src/build/android/pylib/symbols/deobfuscator.py
@@ -0,0 +1,175 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import threading
+import time
+import uuid
+
+from devil.utils import reraiser_thread
+from pylib import constants
+
+
+_MINIMUM_TIMEOUT = 3.0
+_PER_LINE_TIMEOUT = .002  # Should be able to process 500 lines per second.
+_PROCESS_START_TIMEOUT = 10.0
+_MAX_RESTARTS = 10  # Should be plenty unless tool is crashing on start-up.
+
+
+class Deobfuscator(object):
+  def __init__(self, mapping_path):
+    script_path = os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android',
+                               'stacktrace', 'java_deobfuscate.py')
+    cmd = [script_path, mapping_path]
+    # Allow only one thread to call TransformLines() at a time.
+    self._lock = threading.Lock()
+    # Ensure that only one thread attempts to kill self._proc in Close().
+    self._close_lock = threading.Lock()
+    self._closed_called = False
+    # Assign to None so that attribute exists if Popen() throws.
+    self._proc = None
+    # Start process eagerly to hide start-up latency.
+    self._proc_start_time = time.time()
+    self._proc = subprocess.Popen(
+        cmd, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+        close_fds=True)
+
+  def IsClosed(self):
+    return self._closed_called or self._proc.returncode is not None
+
+  def IsBusy(self):
+    return self._lock.locked()
+
+  def IsReady(self):
+    return not self.IsClosed() and not self.IsBusy()
+
+  def TransformLines(self, lines):
+    """Deobfuscates obfuscated names found in the given lines.
+
+    If anything goes wrong (process crashes, timeout, etc), returns |lines|.
+
+    Args:
+      lines: A list of strings without trailing newlines.
+
+    Returns:
+      A list of strings without trailing newlines.
+    """
+    if not lines:
+      return []
+
+    # Deobfuscated stacks contain more frames than obfuscated ones when method
+    # inlining occurs. To account for the extra output lines, keep reading until
+    # this eof_line token is reached.
+    eof_line = uuid.uuid4().hex
+    out_lines = []
+
+    def deobfuscate_reader():
+      while True:
+        line = self._proc.stdout.readline()
+        # Return an empty string at EOF (when stdin is closed).
+        if not line:
+          break
+        line = line[:-1]
+        if line == eof_line:
+          break
+        out_lines.append(line)
+
+    if self.IsBusy():
+      logging.warning('deobfuscator: Having to wait for Java deobfuscation.')
+
+    # Allow only one thread to operate at a time.
+    with self._lock:
+      if self.IsClosed():
+        if not self._closed_called:
+          logging.warning('deobfuscator: Process exited with code=%d.',
+                          self._proc.returncode)
+          self.Close()
+        return lines
+
+      # TODO(agrieve): Can probably speed this up by only sending lines through
+      #     that might contain an obfuscated name.
+      reader_thread = reraiser_thread.ReraiserThread(deobfuscate_reader)
+      reader_thread.start()
+
+      try:
+        self._proc.stdin.write('\n'.join(lines))
+        self._proc.stdin.write('\n{}\n'.format(eof_line))
+        self._proc.stdin.flush()
+        time_since_proc_start = time.time() - self._proc_start_time
+        timeout = (max(0, _PROCESS_START_TIMEOUT - time_since_proc_start) +
+                   max(_MINIMUM_TIMEOUT, len(lines) * _PER_LINE_TIMEOUT))
+        reader_thread.join(timeout)
+        if self.IsClosed():
+          logging.warning(
+              'deobfuscator: Close() called by another thread during join().')
+          return lines
+        if reader_thread.is_alive():
+          logging.error('deobfuscator: Timed out.')
+          self.Close()
+          return lines
+        return out_lines
+      except IOError:
+        logging.exception('deobfuscator: Exception during java_deobfuscate')
+        self.Close()
+        return lines
+
+  def Close(self):
+    with self._close_lock:
+      needs_closing = not self.IsClosed()
+      self._closed_called = True
+
+    if needs_closing:
+      self._proc.stdin.close()
+      self._proc.kill()
+      self._proc.wait()
+
+  def __del__(self):
+    # self._proc is None when Popen() fails.
+    if not self._closed_called and self._proc:
+      logging.error('deobfuscator: Forgot to Close()')
+      self.Close()
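+
+  # Usage sketch (assumes a ProGuard mapping file exists at the given path):
+  #
+  #   d = Deobfuscator('/tmp/ChromePublic.apk.mapping')
+  #   try:
+  #     lines = d.TransformLines(['  at a.a(SourceFile:1)'])
+  #   finally:
+  #     d.Close()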
+
+
+class DeobfuscatorPool(object):
+  # As of Sep 2017, each instance requires about 500MB of RAM, as measured by:
+  # /usr/bin/time -v build/android/stacktrace/java_deobfuscate.py \
+  #     out/Release/apks/ChromePublic.apk.mapping
+  def __init__(self, mapping_path, pool_size=4):
+    self._mapping_path = mapping_path
+    self._pool = [Deobfuscator(mapping_path) for _ in xrange(pool_size)]
+    # Allow only one thread to select from the pool at a time.
+    self._lock = threading.Lock()
+    self._num_restarts = 0
+
+  def TransformLines(self, lines):
+    with self._lock:
+      assert self._pool, 'TransformLines() called on a closed DeobfuscatorPool.'
+
+      # De-obfuscation is broken.
+      if self._num_restarts == _MAX_RESTARTS:
+        raise Exception('Deobfuscation seems broken.')
+
+      # Restart any closed Deobfuscators.
+      for i, d in enumerate(self._pool):
+        if d.IsClosed():
+          logging.warning('deobfuscator: Restarting closed instance.')
+          self._pool[i] = Deobfuscator(self._mapping_path)
+          self._num_restarts += 1
+          if self._num_restarts == _MAX_RESTARTS:
+            logging.warning('deobfuscator: MAX_RESTARTS reached.')
+
+      selected = next((x for x in self._pool if x.IsReady()), self._pool[0])
+      # Rotate the order so that next caller will not choose the same one.
+      self._pool.remove(selected)
+      self._pool.append(selected)
+
+    return selected.TransformLines(lines)
+
+  def Close(self):
+    with self._lock:
+      for d in self._pool:
+        d.Close()
+      self._pool = None
diff --git a/src/build/android/pylib/symbols/elf_symbolizer.py b/src/build/android/pylib/symbols/elf_symbolizer.py
new file mode 100644
index 0000000..1f2f918
--- /dev/null
+++ b/src/build/android/pylib/symbols/elf_symbolizer.py
@@ -0,0 +1,487 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import datetime
+import logging
+import multiprocessing
+import os
+import posixpath
+import Queue
+import re
+import subprocess
+import sys
+import threading
+import time
+
+
+# addr2line builds a possibly infinite memory cache that can exhaust
+# the computer's memory if allowed to grow for too long. This constant
+# controls how many lookups we do before restarting the process. 4000
+# gives near peak performance without extreme memory usage.
+ADDR2LINE_RECYCLE_LIMIT = 4000
+
+
+ELF_MAGIC = '\x7f\x45\x4c\x46'
+
+
+def ContainsElfMagic(file_path):
+  if os.path.getsize(file_path) < 4:
+    return False
+  try:
+    with open(file_path, 'r') as f:
+      b = f.read(4)
+      return b == ELF_MAGIC
+  except IOError:
+    return False
+
+
+class ELFSymbolizer(object):
+  """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
+
+  This class is a frontend for addr2line (part of GNU binutils), designed to
+  symbolize batches of large numbers of symbols for a given ELF file. It
+  supports sharding symbolization against many addr2line instances and
+  pipelining of multiple requests per each instance (in order to hide addr2line
+  internals and OS pipe latencies).
+
+  The interface exhibited by this class is a very simple asynchronous interface,
+  which is based on the following three methods:
+  - SymbolizeAsync(): used to request (enqueue) resolution of a given address.
+  - The |callback| method: used to communicate back the symbol information.
+  - Join(): called to conclude the batch and gather the last outstanding
+    results.
+  In essence, before the Join method returns, this class will have issued as
+  many callbacks as the number of SymbolizeAsync() calls. In this regard, note
+  that due to multiprocess sharding, callbacks can be delivered out of order.
+
+  Some background about addr2line:
+  - it is invoked passing the elf path in the cmdline, piping the addresses in
+    its stdin and getting results on its stdout.
+  - it has pretty large response times for the first requests, but it
+    works very well in streaming mode once it has been warmed up.
+  - it doesn't scale by itself (on more cores). However, spawning multiple
+    instances at the same time on the same file is pretty efficient as they
+    keep hitting the pagecache and become mostly CPU bound.
+  - it might hang or crash, mostly for OOM. This class deals with both of these
+    problems.
+
+  Despite the "scary" imports and the multi* words above, (almost) no multi-
+  threading/processing is involved from the python viewpoint. Concurrency
+  here is achieved by spawning several addr2line subprocesses and handling their
+  output pipes asynchronously. Therefore, all the code here (with the exception
+  of the Queue instance in Addr2Line) should be free from mind-blowing
+  thread-safety concerns.
+
+  The multiprocess sharding works as follows:
+  The symbolizer tries to use the lowest possible number of addr2line
+  instances (bounded by |max_concurrent_jobs|) and to enqueue all the requests
+  in a single addr2line instance. For a few symbols (i.e. dozens), sharding
+  isn't worth the startup cost.
+  The multiprocess logic kicks in as soon as the queues for the existing
+  instances grow. Specifically, once all the existing instances reach the
+  |max_queue_size| bound, a new addr2line instance is kicked in.
+  In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+  have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+  blocking the SymbolizeAsync method.
+
+  This module has been deliberately designed to be dependency free (w.r.t.
+  other modules in this project), to allow easy reuse in external projects.
+  """
+
+  def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+      max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+      source_root_path=None, strip_base_path=None):
+    """Args:
+      elf_file_path: path of the elf file to be symbolized.
+      addr2line_path: path of the toolchain's addr2line binary.
+      callback: a callback which will be invoked for each resolved symbol with
+          the two args (sym_info, callback_arg). The former is an instance of
+          |ELFSymbolInfo| and contains the symbol information. The latter is an
+          embedder-provided argument which is passed to SymbolizeAsync().
+      inlines: when True, the ELFSymbolInfo will also contain the details about
+          the outer inlining functions. When False, only the innermost function
+          will be provided.
+      max_concurrent_jobs: Max number of addr2line instances spawned.
+          Parallelize responsibly; addr2line is a memory and I/O monster.
+      max_queue_size: Max number of outstanding requests per addr2line instance.
+      addr2line_timeout: Max time (in seconds) to wait for an addr2line
+          After the timeout, the instance will be considered hung and respawned.
+      source_root_path: In some toolchains only the name of the source file is
+          output, without any path information; disambiguation searches
+          through the source directory specified by |source_root_path| argument
+          for files whose name matches, adding the full path information to the
+          output. For example, if the toolchain outputs "unicode.cc" and there
+          is a file called "unicode.cc" located under |source_root_path|/foo,
+          the tool will replace "unicode.cc" with
+          "|source_root_path|/foo/unicode.cc". If there are multiple files with
+          the same name, disambiguation will fail because the tool cannot
+          determine which of the files was the source of the symbol.
+      strip_base_path: Rebases the symbols' source paths onto
+          |source_root_path| (i.e. replaces |strip_base_path| with
+          |source_root_path|).
+    """
+    assert os.path.isfile(addr2line_path), 'Cannot find ' + addr2line_path
+    self.elf_file_path = elf_file_path
+    self.addr2line_path = addr2line_path
+    self.callback = callback
+    self.inlines = inlines
+    self.max_concurrent_jobs = (max_concurrent_jobs or
+                                min(multiprocessing.cpu_count(), 4))
+    self.max_queue_size = max_queue_size
+    self.addr2line_timeout = addr2line_timeout
+    self.requests_counter = 0  # For generating monotonic request IDs.
+    self._a2l_instances = []  # Up to |max_concurrent_jobs| _Addr2Line inst.
+
+    # If necessary, create disambiguation lookup table
+    self.disambiguate = source_root_path is not None
+    self.disambiguation_table = {}
+    self.strip_base_path = strip_base_path
+    if self.disambiguate:
+      self.source_root_path = os.path.abspath(source_root_path)
+      self._CreateDisambiguationTable()
+
+    # Create one addr2line instance. More instances will be created on demand
+    # (up to |max_concurrent_jobs|) depending on the rate of the requests.
+    self._CreateNewA2LInstance()
+
+  def SymbolizeAsync(self, addr, callback_arg=None):
+    """Requests symbolization of a given address.
+
+    This method is not guaranteed to return immediately. It generally does, but
+    in some scenarios (e.g. all addr2line instances have full queues) it can
+    block to create back-pressure.
+
+    Args:
+      addr: address to symbolize.
+      callback_arg: optional argument which will be passed to the |callback|."""
+    assert isinstance(addr, int)
+
+    # Process all the symbols that have been resolved in the meanwhile.
+    # Essentially, this drains all the addr2line(s) out queues.
+    for a2l_to_purge in self._a2l_instances:
+      a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+      a2l_to_purge.RecycleIfNecessary()
+
+    # Find the best instance according to this logic:
+    # 1. Find an existing instance with the shortest queue.
+    # 2. If all instances' queues are full, but there is room in the pool,
+    #    (i.e. < |max_concurrent_jobs|) create a new instance.
+    # 3. If there were already |max_concurrent_jobs| instances and all of them
+    #    had full queues, apply back-pressure.
+
+    # 1.
+    def _SortByQueueSizeAndReqID(a2l):
+      return (a2l.queue_size, a2l.first_request_id)
+    a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID)
+
+    # 2.
+    if (a2l.queue_size >= self.max_queue_size and
+        len(self._a2l_instances) < self.max_concurrent_jobs):
+      a2l = self._CreateNewA2LInstance()
+
+    # 3.
+    if a2l.queue_size >= self.max_queue_size:
+      a2l.WaitForNextSymbolInQueue()
+
+    a2l.EnqueueRequest(addr, callback_arg)
+
+  def WaitForIdle(self):
+    """Waits for all the outstanding requests to complete."""
+    for a2l in self._a2l_instances:
+      a2l.WaitForIdle()
+
+  def Join(self):
+    """Waits for all the outstanding requests to complete and terminates."""
+    for a2l in self._a2l_instances:
+      a2l.WaitForIdle()
+      a2l.Terminate()
+
+  def _CreateNewA2LInstance(self):
+    assert len(self._a2l_instances) < self.max_concurrent_jobs
+    a2l = ELFSymbolizer.Addr2Line(self)
+    self._a2l_instances.append(a2l)
+    return a2l
+
+  def _CreateDisambiguationTable(self):
+    """ Non-unique file names will result in None entries"""
+    start_time = time.time()
+    logging.info('Collecting information about available source files...')
+    self.disambiguation_table = {}
+
+    for root, _, filenames in os.walk(self.source_root_path):
+      for f in filenames:
+        if f not in self.disambiguation_table:
+          self.disambiguation_table[f] = os.path.join(root, f)
+        else:
+          self.disambiguation_table[f] = None
+    logging.info('Finished collecting information about '
+                 'possible files (took %.1f s).',
+                 (time.time() - start_time))
+
+
+  class Addr2Line(object):
+    """A python wrapper around an addr2line instance.
+
+    The communication with the addr2line process looks as follows:
+      [STDIN]         [STDOUT]  (from addr2line's viewpoint)
+    > f001111
+    > f002222
+                    < Symbol::Name(foo, bar) for f001111
+                    < /path/to/source/file.c:line_number
+    > f003333
+                    < Symbol::Name2() for f002222
+                    < /path/to/source/file.c:line_number
+                    < Symbol::Name3() for f003333
+                    < /path/to/source/file.c:line_number
+    """
+
+    SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+    def __init__(self, symbolizer):
+      self._symbolizer = symbolizer
+      self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+      # The request queue (i.e. addresses pushed to addr2line's stdin and not
+      # yet retrieved on stdout)
+      self._request_queue = collections.deque()
+
+      # This is essentially len(self._request_queue). It has been optimized to a
+      # separate field because turned out to be a perf hot-spot.
+      self.queue_size = 0
+
+      # Keep track of the number of symbols a process has processed to
+      # avoid a single process growing too big and using all the memory.
+      self._processed_symbols_count = 0
+
+      # Objects required to handle the addr2line subprocess.
+      self._proc = None  # Subprocess.Popen(...) instance.
+      self._thread = None  # Threading.thread instance.
+      self._out_queue = None  # Queue.Queue instance (for buffering a2l stdout).
+      self._RestartAddr2LineProcess()
+
+    def EnqueueRequest(self, addr, callback_arg):
+      """Pushes an address to addr2line's stdin (and keeps track of it)."""
+      self._symbolizer.requests_counter += 1  # For global "age" of requests.
+      req_idx = self._symbolizer.requests_counter
+      self._request_queue.append((addr, callback_arg, req_idx))
+      self.queue_size += 1
+      self._WriteToA2lStdin(addr)
+
+    def WaitForIdle(self):
+      """Waits until all the pending requests have been symbolized."""
+      while self.queue_size > 0:
+        self.WaitForNextSymbolInQueue()
+
+    def WaitForNextSymbolInQueue(self):
+      """Waits for the next pending request to be symbolized."""
+      if not self.queue_size:
+        return
+
+      # This outer loop guards against a2l hanging (detecting stdout timeout).
+      while True:
+        start_time = datetime.datetime.now()
+        timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+        # The inner loop guards against a2l crashing (checking if it exited).
+        while datetime.datetime.now() - start_time < timeout:
+          # poll() returns non-None if the process exited. a2l should never
+          # exit.
+          if self._proc.poll():
+            logging.warning('addr2line crashed, respawning (lib: %s).',
+                            self._lib_file_name)
+            self._RestartAddr2LineProcess()
+            # TODO(primiano): the best thing to do in this case would be
+            # shrinking the pool size as, very likely, addr2line is crashed
+            # due to low memory (and the respawned one will die again soon).
+
+          try:
+            lines = self._out_queue.get(block=True, timeout=0.25)
+          except Queue.Empty:
+            # On timeout (1/4 s.), repeat the inner loop and check whether the
+            # addr2line process crashed or we have been waiting for its output
+            # for too long.
+            continue
+
+          # In nominal conditions, we get straight to this point.
+          self._ProcessSymbolOutput(lines)
+          return
+
+        # If this point is reached, we waited more than |addr2line_timeout|.
+        logging.warning('Hung addr2line process, respawning (lib: %s).',
+                        self._lib_file_name)
+        self._RestartAddr2LineProcess()
+
+    def ProcessAllResolvedSymbolsInQueue(self):
+      """Consumes all the addr2line output lines produced (without blocking)."""
+      if not self.queue_size:
+        return
+      while True:
+        try:
+          lines = self._out_queue.get_nowait()
+        except Queue.Empty:
+          break
+        self._ProcessSymbolOutput(lines)
+
+    def RecycleIfNecessary(self):
+      """Restarts the process if it has been used for too long.
+
+      A long running addr2line process will consume excessive amounts
+      of memory without any gain in performance."""
+      if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+        self._RestartAddr2LineProcess()
+
+    def Terminate(self):
+      """Kills the underlying addr2line process.
+
+      The poller |_thread| will terminate as well due to the broken pipe."""
+      try:
+        self._proc.kill()
+        self._proc.communicate()  # Essentially wait() without risking deadlock.
+      except Exception: # pylint: disable=broad-except
+        # An exception while terminating? How interesting.
+        pass
+      self._proc = None
+
+    def _WriteToA2lStdin(self, addr):
+      self._proc.stdin.write('%s\n' % hex(addr))
+      if self._symbolizer.inlines:
+        # In the case of inlines we output an extra blank line, which causes
+        # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
+        self._proc.stdin.write('\n')
+      self._proc.stdin.flush()
+
+    def _ProcessSymbolOutput(self, lines):
+      """Parses an addr2line symbol output and triggers the client callback."""
+      (_, callback_arg, _) = self._request_queue.popleft()
+      self.queue_size -= 1
+
+      innermost_sym_info = None
+      sym_info = None
+      for (line1, line2) in lines:
+        prev_sym_info = sym_info
+        name = line1 if not line1.startswith('?') else None
+        source_path = None
+        source_line = None
+        m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2)
+        if m:
+          if not m.group(1).startswith('?'):
+            source_path = m.group(1)
+            if not m.group(2).startswith('?'):
+              source_line = int(m.group(2))
+        else:
+          logging.warning('Got invalid symbol path from addr2line: %s', line2)
+
+        # In case disambiguation is on, and needed
+        was_ambiguous = False
+        disambiguated = False
+        if self._symbolizer.disambiguate:
+          if source_path and not posixpath.isabs(source_path):
+            path = self._symbolizer.disambiguation_table.get(source_path)
+            was_ambiguous = True
+            disambiguated = path is not None
+            source_path = path if disambiguated else source_path
+
+          # Use absolute paths (so that paths are consistent, as disambiguation
+          # uses absolute paths)
+          if source_path and not was_ambiguous:
+            source_path = os.path.abspath(source_path)
+
+        if source_path and self._symbolizer.strip_base_path:
+          # Strip the base path
+          source_path = re.sub('^' + self._symbolizer.strip_base_path,
+              self._symbolizer.source_root_path or '', source_path)
+
+        sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous,
+                                 disambiguated)
+        if prev_sym_info:
+          prev_sym_info.inlined_by = sym_info
+        if not innermost_sym_info:
+          innermost_sym_info = sym_info
+
+      self._processed_symbols_count += 1
+      self._symbolizer.callback(innermost_sym_info, callback_arg)
+
+    def _RestartAddr2LineProcess(self):
+      if self._proc:
+        self.Terminate()
+
+      # The only reason for the existence of this Queue (and the corresponding
+      # Thread below) is the lack of a subprocess.stdout.poll_avail_lines().
+      # Essentially this is a pipe able to extract a couple of lines atomically.
+      self._out_queue = Queue.Queue()
+
+      # Start the underlying addr2line process in line buffered mode.
+
+      cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
+          '--exe=' + self._symbolizer.elf_file_path]
+      if self._symbolizer.inlines:
+        cmd += ['--inlines']
+      self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
+          stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True)
+
+      # Start the poller thread, which simply moves atomically the lines read
+      # from the addr2line's stdout to the |_out_queue|.
+      self._thread = threading.Thread(
+          target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
+          args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
+      self._thread.daemon = True  # Don't prevent early process exit.
+      self._thread.start()
+
+      self._processed_symbols_count = 0
+
+      # Replay the pending requests on the new process (only for the case
+      # of a hung addr2line timing out during the game).
+      for (addr, _, _) in self._request_queue:
+        self._WriteToA2lStdin(addr)
+
+    @staticmethod
+    def StdoutReaderThread(process_pipe, queue, inlines):
+      """The poller thread fn, which moves the addr2line stdout to the |queue|.
+
+      This is the only piece of code not running on the main thread. It merely
+      writes to a Queue, which is thread-safe. In the case of inlines, it
+      detects the ??,??:0 marker and sends the lines atomically, such that the
+      main thread always receives all the lines corresponding to one symbol in
+      one shot."""
+      try:
+        lines_for_one_symbol = []
+        while True:
+          line1 = process_pipe.readline().rstrip('\r\n')
+          line2 = process_pipe.readline().rstrip('\r\n')
+          if not line1 or not line2:
+            break
+          inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+                                  (line1 != '??' and line2 != '??:0'))
+          if not inlines or inline_has_more_lines:
+            lines_for_one_symbol += [(line1, line2)]
+          if inline_has_more_lines:
+            continue
+          queue.put(lines_for_one_symbol)
+          lines_for_one_symbol = []
+        process_pipe.close()
+
+      # Every addr2line process will die at some point; let it die silently.
+      except (IOError, OSError):
+        pass
+
+    @property
+    def first_request_id(self):
+      """Returns the request_id of the oldest pending request in the queue."""
+      return self._request_queue[0][2] if self._request_queue else 0
+
+
+class ELFSymbolInfo(object):
+  """The result of the symbolization passed as first arg. of each callback."""
+
+  def __init__(self, name, source_path, source_line, was_ambiguous=False,
+               disambiguated=False):
+    """All the fields here can be None (if addr2line replies with '??')."""
+    self.name = name
+    self.source_path = source_path
+    self.source_line = source_line
+    # In the case of |inlines|=True, the |inlined_by| points to the outer
+    # function inlining the current one (and so on, to form a chain).
+    self.inlined_by = None
+    self.disambiguated = disambiguated
+    self.was_ambiguous = was_ambiguous
+
+  def __str__(self):
+    return '%s [%s:%d]' % (
+        self.name or '??', self.source_path or '??', self.source_line or 0)
diff --git a/src/build/android/pylib/symbols/elf_symbolizer_unittest.py b/src/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100755
index 0000000..765b598
--- /dev/null
+++ b/src/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import unittest
+
+from pylib.symbols import elf_symbolizer
+from pylib.symbols import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+                              'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
+class ELFSymbolizerTest(unittest.TestCase):
+  def setUp(self):
+    self._callback = functools.partial(
+        ELFSymbolizerTest._SymbolizeCallback, self)
+    self._resolved_addresses = set()
+    # Mute warnings; we expect them due to the crash/hang tests.
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def testParallelism1(self):
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+
+  def testParallelism4(self):
+    self._RunTest(max_concurrent_jobs=4, num_symbols=100)
+
+  def testParallelism8(self):
+    self._RunTest(max_concurrent_jobs=8, num_symbols=100)
+
+  def testCrash(self):
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '0'
+
+  def testHang(self):
+    os.environ['MOCK_A2L_HANG_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_HANG_EVERY'] = '0'
+
+  def testInlines(self):
+    """Stimulate the inline processing logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        inlines=True,
+        max_concurrent_jobs=4)
+
+    for addr in xrange(1000):
+      exp_inline = False
+      exp_unknown = False
+
+      # First 100 addresses with inlines.
+      if addr < 100:
+        addr += _INLINE_MOCK_ADDR
+        exp_inline = True
+
+      # Followed by 100 without inlines.
+      elif addr < 200:
+        pass
+
+      # Followed by 100 interleaved inlines and not inlines.
+      elif addr < 300:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+
+      # Followed by 100 interleaved inlines and unknowns.
+      elif addr < 400:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+        else:
+          addr += _UNKNOWN_MOCK_ADDR
+          exp_unknown = True
+
+      exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
+      exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
+      exp_source_line = addr if not exp_unknown else None
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def testIncompleteSyminfo(self):
+    """Stimulate the symbol-not-resolved logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=1)
+
+    # Test symbols with valid name but incomplete path.
+    addr = _INCOMPLETE_MOCK_ADDR
+    exp_name = 'mock_sym_for_addr_%d' % addr
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    # Test symbols with no name or sym info.
+    addr = _UNKNOWN_MOCK_ADDR
+    exp_name = None
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def testWaitForIdle(self):
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=1)
+
+    # Test symbols with valid name but incomplete path.
+    addr = _INCOMPLETE_MOCK_ADDR
+    exp_name = 'mock_sym_for_addr_%d' % addr
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+    symbolizer.WaitForIdle()
+
+    # Test symbols with no name or sym info.
+    addr = _UNKNOWN_MOCK_ADDR
+    exp_name = None
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+    symbolizer.Join()
+
+  def _RunTest(self, max_concurrent_jobs, num_symbols):
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=max_concurrent_jobs,
+        addr2line_timeout=0.5)
+
+    for addr in xrange(num_symbols):
+      exp_name = 'mock_sym_for_addr_%d' % addr
+      exp_source_path = 'mock_src/mock_lib1.so.c'
+      exp_source_line = addr
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+    # Check that all the expected callbacks have been received.
+    for addr in xrange(num_symbols):
+      self.assertIn(addr, self._resolved_addresses)
+      self._resolved_addresses.remove(addr)
+
+    # Check for unexpected callbacks.
+    self.assertEqual(len(self._resolved_addresses), 0)
+
+  def _SymbolizeCallback(self, sym_info, cb_arg):
+    self.assertIsInstance(sym_info, elf_symbolizer.ELFSymbolInfo)
+    self.assertIsInstance(cb_arg, tuple)
+    self.assertEqual(len(cb_arg), 5)
+
+    # Unpack expectations from the callback extra argument.
+    (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg
+    if exp_name is None:
+      self.assertIsNone(sym_info.name)
+    else:
+      self.assertTrue(sym_info.name.startswith(exp_name))
+    self.assertEqual(sym_info.source_path, exp_source_path)
+    self.assertEqual(sym_info.source_line, exp_source_line)
+
+    if exp_inlines:
+      self.assertEqual(sym_info.name, exp_name + '_inner')
+      self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle')
+      self.assertEqual(sym_info.inlined_by.inlined_by.name,
+                       exp_name + '_outer')
+
+    # Check against duplicate callbacks.
+    self.assertNotIn(addr, self._resolved_addresses)
+    self._resolved_addresses.add(addr)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/symbols/mock_addr2line/__init__.py b/src/build/android/pylib/symbols/mock_addr2line/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/symbols/mock_addr2line/__init__.py
diff --git a/src/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/src/build/android/pylib/symbols/mock_addr2line/mock_addr2line
new file mode 100755
index 0000000..8b2a723
--- /dev/null
+++ b/src/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple mock for addr2line.
+
+Outputs mock symbol information, with each symbol being a function of the
+original address (so it is easy to double-check consistency in unittests).
+"""
+
+from __future__ import print_function
+
+import optparse
+import os
+import posixpath
+import sys
+import time
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-e', '--exe', dest='exe')  # Path of the debug-library.so.
+  # Silently swallow the other unnecessary arguments.
+  parser.add_option('-C', '--demangle', action='store_true')
+  parser.add_option('-f', '--functions', action='store_true')
+  parser.add_option('-i', '--inlines', action='store_true')
+  options, _ = parser.parse_args(argv[1:])
+  lib_file_name = posixpath.basename(options.exe)
+  processed_sym_count = 0
+  crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
+  hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))
+
+  while True:
+    line = sys.stdin.readline().rstrip('\r')
+    if not line:
+      break
+
+    # An empty line should generate '??,??:0' (used as a marker for inlines).
+    if line == '\n':
+      print('??')
+      print('??:0')
+      sys.stdout.flush()
+      continue
+
+    addr = int(line, 16)
+    processed_sym_count += 1
+    if crash_every and processed_sym_count % crash_every == 0:
+      sys.exit(1)
+    if hang_every and processed_sym_count % hang_every == 0:
+      time.sleep(1)
+
+    # Addresses < 1M will return good mock symbol information.
+    if addr < 1024 * 1024:
+      print('mock_sym_for_addr_%d' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+
+    # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
+    elif addr < 2 * 1024 * 1024:
+      print('mock_sym_for_addr_%d' % addr)
+      print('??:0')
+
+    # Addresses 2M <= x < 3M will return unknown symbol information.
+    elif addr < 3 * 1024 * 1024:
+      print('??')
+      print('??')
+
+    # Addresses 3M <= x < 4M will return inlines.
+    elif addr < 4 * 1024 * 1024:
+      print('mock_sym_for_addr_%d_inner' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+      print('mock_sym_for_addr_%d_middle' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+      print('mock_sym_for_addr_%d_outer' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+
+    sys.stdout.flush()
+
+
+if __name__ == '__main__':
+  main(sys.argv)
\ No newline at end of file
diff --git a/src/build/android/pylib/symbols/stack_symbolizer.py b/src/build/android/pylib/symbols/stack_symbolizer.py
new file mode 100644
index 0000000..4173741
--- /dev/null
+++ b/src/build/android/pylib/symbols/stack_symbolizer.py
@@ -0,0 +1,86 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import tempfile
+import time
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+_STACK_TOOL = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..',
+                           'third_party', 'android_platform', 'development',
+                           'scripts', 'stack')
+ABI_REG = re.compile('ABI: \'(.+?)\'')
+
+
+def _DeviceAbiToArch(device_abi):
+  # The order of this list is significant to find the more specific match
+  # (e.g., arm64) before the less specific (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
+  for arch in arches:
+    if arch in device_abi:
+      return arch
+  raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+
+class Symbolizer(object):
+  """A helper class to symbolize stack."""
+
+  def __init__(self, apk_under_test=None):
+    self._apk_under_test = apk_under_test
+    self._time_spent_symbolizing = 0
+
+
+  def __del__(self):
+    self.CleanUp()
+
+
+  def CleanUp(self):
+    """Clean up the temporary directory of apk libs."""
+    if self._time_spent_symbolizing > 0:
+      logging.info(
+          'Total time spent symbolizing: %.2fs', self._time_spent_symbolizing)
+
+
+  def ExtractAndResolveNativeStackTraces(self, data_to_symbolize,
+                                         device_abi, include_stack=True):
+    """Run the stack tool for given input.
+
+    Args:
+      data_to_symbolize: a list of strings to symbolize.
+      device_abi: the default ABI of the device which generated the tombstone.
+      include_stack: boolean whether to include stack data in output.
+
+    Yields:
+      A string for each line of resolved stack output.
+    """
+    if not os.path.exists(_STACK_TOOL):
+      logging.warning('%s missing. Unable to resolve native stack traces.',
+                      _STACK_TOOL)
+      return
+
+    arch = _DeviceAbiToArch(device_abi)
+    if not arch:
+      logging.warning('No device_abi can be found.')
+      return
+
+    cmd = [_STACK_TOOL, '--arch', arch, '--output-directory',
+           constants.GetOutDirectory(), '--more-info']
+    env = dict(os.environ)
+    env['PYTHONDONTWRITEBYTECODE'] = '1'
+    with tempfile.NamedTemporaryFile() as f:
+      f.write('\n'.join(data_to_symbolize))
+      f.flush()
+      start = time.time()
+      try:
+        _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name], env=env)
+      finally:
+        self._time_spent_symbolizing += time.time() - start
+    for line in output.splitlines():
+      if not include_stack and 'Stack Data:' in line:
+        break
+      yield line
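+
+
+# Illustrative sketch only, never called by production code: how a caller
+# would feed tombstone lines through the Symbolizer. |tombstone_lines| and
+# the ABI value are assumptions for the example.
+def _ExampleSymbolizeTombstone(tombstone_lines):
+  symbolizer = Symbolizer()
+  # ExtractAndResolveNativeStackTraces() is a generator; iterate over it to
+  # obtain the resolved output lines.
+  for line in symbolizer.ExtractAndResolveNativeStackTraces(
+      tombstone_lines, device_abi='armeabi-v7a'):
+    logging.info('%s', line)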
diff --git a/src/build/android/pylib/symbols/symbol_utils.py b/src/build/android/pylib/symbols/symbol_utils.py
new file mode 100644
index 0000000..dea3c63
--- /dev/null
+++ b/src/build/android/pylib/symbols/symbol_utils.py
@@ -0,0 +1,814 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import bisect
+import collections
+import logging
+import os
+import re
+
+from pylib.constants import host_paths
+from pylib.symbols import elf_symbolizer
+
+
+def _AndroidAbiToCpuArch(android_abi):
+  """Return the Chromium CPU architecture name for a given Android ABI."""
+  _ARCH_MAP = {
+    'armeabi': 'arm',
+    'armeabi-v7a': 'arm',
+    'arm64-v8a': 'arm64',
+    'x86_64': 'x64',
+  }
+  return _ARCH_MAP.get(android_abi, android_abi)
+
+
+def _HexAddressRegexpFor(android_abi):
+  """Return a regexp matching hexadecimal addresses for a given Android ABI."""
+  if android_abi in ['x86_64', 'arm64-v8a', 'mips64']:
+    width = 16
+  else:
+    width = 8
+  return '[0-9a-f]{%d}' % width
+
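+# For example (purely illustrative): _HexAddressRegexpFor('arm64-v8a')
+# returns '[0-9a-f]{16}', while a 32-bit ABI such as 'armeabi-v7a' returns
+# '[0-9a-f]{8}'.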
+
+class HostLibraryFinder(object):
+  """Translate device library path to matching host unstripped library path.
+
+  Usage is the following:
+    1) Create instance.
+    2) Call AddSearchDir() once or more times to add host directory path to
+       look for unstripped native libraries.
+    3) Call Find(device_libpath) repeatedly to translate a device-specific
+       library path into the corresponding host path to the unstripped
+       version.
+  """
+  def __init__(self):
+    """Initialize instance."""
+    self._search_dirs = []
+    self._lib_map = {}        # Map of library name to host file paths.
+
+  def AddSearchDir(self, lib_dir):
+    """Add a directory to the search path for host native shared libraries.
+
+    Args:
+      lib_dir: host path containing native libraries.
+    """
+    if not os.path.exists(lib_dir):
+      logging.warning('Ignoring missing host library directory: %s', lib_dir)
+      return
+    if not os.path.isdir(lib_dir):
+      logging.warning('Ignoring invalid host library directory: %s', lib_dir)
+      return
+    self._search_dirs.append(lib_dir)
+    self._lib_map = {}  # Reset the map.
+
+  def Find(self, device_libpath):
+    """Find the host file path matching a specific device library path.
+
+    Args:
+      device_libpath: device-specific file path to library or executable.
+    Returns:
+      host file path to the unstripped version of the library, or None.
+    """
+    host_lib_path = None
+    lib_name = os.path.basename(device_libpath)
+    host_lib_path = self._lib_map.get(lib_name)
+    if not host_lib_path:
+      for search_dir in self._search_dirs:
+        lib_path = os.path.join(search_dir, lib_name)
+        if os.path.exists(lib_path):
+          host_lib_path = lib_path
+          break
+
+      if not host_lib_path:
+        logging.debug('Could not find host library for: %s', lib_name)
+      self._lib_map[lib_name] = host_lib_path
+
+    return host_lib_path
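+
+
+# Minimal usage sketch (illustration only, never called): the search
+# directory and device library path below are assumptions.
+def _ExampleHostLibraryFinderUsage():
+  finder = HostLibraryFinder()
+  finder.AddSearchDir('/path/to/out/Release/lib.unstripped')
+  # Returns the host path of the unstripped libfoo.so, or None if not found.
+  return finder.Find('/data/app/com.example.app-1/lib/x86/libfoo.so')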
+
+
+class SymbolResolver(object):
+  """A base class for objets that can symbolize library (path, offset)
+     pairs into symbol information strings. Usage is the following:
+
+     1) Create new instance (by calling the constructor of a derived
+        class, since this is only the base one).
+
+     2) Call SetAndroidAbi() before any call to FindSymbolInfo() in order
+        to set the Android CPU ABI used for symbolization.
+
+     3) Before the first call to FindSymbolInfo(), one can call
+        AddLibraryOffset(), or AddLibraryOffsets() to record a set of offsets
+        that you will want to symbolize later through FindSymbolInfo(). Doing
+        so allows some SymbolResolver derived classes to work faster (e.g. the
+        one that invokes the 'addr2line' program, since the latter works faster
+        if the offsets provided as inputs are sorted in increasing order).
+
+     4) Call FindSymbolInfo(path, offset) to return the corresponding
+        symbol information string, or None if this doesn't correspond
+        to anything the instance can handle.
+
+        Note that whether the path is specific to the device or to the
+        host depends on the derived class implementation.
+  """
+  def __init__(self):
+    self._android_abi = None
+    self._lib_offsets_map = collections.defaultdict(set)
+
+  def SetAndroidAbi(self, android_abi):
+    """Set the Android ABI value for this instance.
+
+    Calling this function before FindSymbolInfo() is required by some
+    derived class implementations.
+
+    Args:
+      android_abi: Native Android CPU ABI name (e.g. 'armeabi-v7a').
+    Raises:
+      Exception if the ABI was already set with a different value.
+    """
+    if self._android_abi and self._android_abi != android_abi:
+      raise Exception('Cannot reset Android ABI to new value %s, already set '
+                      'to %s' % (android_abi, self._android_abi))
+
+    self._android_abi = android_abi
+
+  def AddLibraryOffset(self, lib_path, offset):
+    """Associate a single offset to a given device library.
+
+    This must be called before FindSymbolInfo(), otherwise its input arguments
+    will be ignored.
+
+    Args:
+      lib_path: A library path.
+      offset: An integer offset within the corresponding library that will be
+        symbolized by future calls to FindSymbolInfo.
+    """
+    self._lib_offsets_map[lib_path].add(offset)
+
+  def AddLibraryOffsets(self, lib_path, lib_offsets):
+    """Associate a set of wanted offsets to a given device library.
+
+    This must be called before FindSymbolInfo(), otherwise its input arguments
+    will be ignored.
+
+    Args:
+      lib_path: A library path.
+      lib_offsets: An iterable of integer offsets within the corresponding
+        library that will be symbolized by future calls to FindSymbolInfo.
+    """
+    self._lib_offsets_map[lib_path].update(lib_offsets)
+
+  # pylint: disable=unused-argument,no-self-use
+  def FindSymbolInfo(self, lib_path, lib_offset):
+    """Symbolize a device library path and offset.
+
+    Args:
+      lib_path: Library path (device or host specific, depending on the
+        derived class implementation).
+      lib_offset: Integer offset within the library.
+    Returns:
+      Corresponding symbol information string, or None.
+    """
+    # The base implementation cannot symbolize anything.
+    return None
+  # pylint: enable=unused-argument,no-self-use
+
+
+class ElfSymbolResolver(SymbolResolver):
+  """A SymbolResolver that can symbolize host path + offset values using
+     an elf_symbolizer.ELFSymbolizer instance.
+  """
+  def __init__(self, addr2line_path_for_tests=None):
+    super(ElfSymbolResolver, self).__init__()
+    self._addr2line_path = addr2line_path_for_tests
+
+    # Used to cache one ELFSymbolizer instance per library path.
+    self._elf_symbolizer_cache = {}
+
+    # Used to cache FindSymbolInfo() results. Maps host library paths
+    # to (offset -> symbol info string) dictionaries.
+    self._symbol_info_cache = collections.defaultdict(dict)
+    self._allow_symbolizer = True
+
+  def _CreateSymbolizerFor(self, host_path):
+    """Create the ELFSymbolizer instance associated with a given lib path."""
+    addr2line_path = self._addr2line_path
+    if not addr2line_path:
+      if not self._android_abi:
+        raise Exception(
+            'Android CPU ABI must be set before calling FindSymbolInfo!')
+
+      cpu_arch = _AndroidAbiToCpuArch(self._android_abi)
+      self._addr2line_path = host_paths.ToolPath('addr2line', cpu_arch)
+
+    return elf_symbolizer.ELFSymbolizer(
+        elf_file_path=host_path, addr2line_path=self._addr2line_path,
+        callback=ElfSymbolResolver._Callback, inlines=True)
+
+  def DisallowSymbolizerForTesting(self):
+    """Disallow FindSymbolInfo() from using a symbolizer.
+
+    This is used during unit-testing to ensure that the offsets that were
+    recorded via AddLibraryOffset()/AddLibraryOffsets() are properly
+    symbolized, but not anything else.
+    """
+    self._allow_symbolizer = False
+
+  def FindSymbolInfo(self, host_path, offset):
+    """Override SymbolResolver.FindSymbolInfo.
+
+    Args:
+      host_path: Host-specific path to the native shared library.
+      offset: Integer offset within the native library.
+    Returns:
+      A symbol info string, or None.
+    """
+    offset_map = self._symbol_info_cache[host_path]
+    symbol_info = offset_map.get(offset)
+    if symbol_info:
+      return symbol_info
+
+    # Create symbolizer on demand.
+    symbolizer = self._elf_symbolizer_cache.get(host_path)
+    if not symbolizer:
+      symbolizer = self._CreateSymbolizerFor(host_path)
+      self._elf_symbolizer_cache[host_path] = symbolizer
+
+      # If there are pre-recorded offsets for this path, symbolize them now.
+      offsets = self._lib_offsets_map.get(host_path)
+      if offsets:
+        offset_map = {}
+        for pre_offset in offsets:
+          symbolizer.SymbolizeAsync(
+              pre_offset, callback_arg=(offset_map, pre_offset))
+        symbolizer.WaitForIdle()
+        self._symbol_info_cache[host_path] = offset_map
+
+        symbol_info = offset_map.get(offset)
+        if symbol_info:
+          return symbol_info
+
+    if not self._allow_symbolizer:
+      return None
+
+    # Symbolize single offset. Slower if addresses are not provided in
+    # increasing order to addr2line.
+    symbolizer.SymbolizeAsync(offset,
+                              callback_arg=(offset_map, offset))
+    symbolizer.WaitForIdle()
+    return offset_map.get(offset)
+
+  @staticmethod
+  def _Callback(sym_info, callback_arg):
+    offset_map, offset = callback_arg
+    offset_map[offset] = str(sym_info)
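+
+
+# Illustration only (not called by real code): batch-register offsets first
+# so addr2line sees them in a single pass, then query. The ABI, path and
+# offsets are assumptions.
+def _ExampleElfSymbolResolverUsage(host_lib_path, offsets):
+  resolver = ElfSymbolResolver()
+  resolver.SetAndroidAbi('armeabi-v7a')
+  resolver.AddLibraryOffsets(host_lib_path, offsets)
+  # The first call lazily creates the ELFSymbolizer and symbolizes all the
+  # pre-recorded offsets at once; later calls hit the cache.
+  return dict((offset, resolver.FindSymbolInfo(host_lib_path, offset))
+              for offset in offsets)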
+
+
+class DeviceSymbolResolver(SymbolResolver):
+  """A SymbolResolver instance that accepts device-specific path.
+
+  Usage is the following:
+    1) Create new instance, passing a parent SymbolResolver instance that
+       accepts host-specific paths, and a HostLibraryFinder instance.
+
+    2) Optional: call AddApkOffsets() to add offsets from within an APK
+       that contains uncompressed native shared libraries.
+
+    3) Use it as any SymbolResolver instance.
+  """
+  def __init__(self, host_resolver, host_lib_finder):
+    """Initialize instance.
+
+    Args:
+      host_resolver: A parent SymbolResolver instance that will be used
+        to resolve symbols from host library paths.
+      host_lib_finder: A HostLibraryFinder instance used to locate
+        unstripped libraries on the host.
+    """
+    super(DeviceSymbolResolver, self).__init__()
+    self._host_lib_finder = host_lib_finder
+    self._bad_device_lib_paths = set()
+    self._host_resolver = host_resolver
+
+  def SetAndroidAbi(self, android_abi):
+    super(DeviceSymbolResolver, self).SetAndroidAbi(android_abi)
+    self._host_resolver.SetAndroidAbi(android_abi)
+
+  def AddLibraryOffsets(self, device_lib_path, lib_offsets):
+    """Associate a set of wanted offsets to a given device library.
+
+    This must be called before FindSymbolInfo(), otherwise its input arguments
+    will be ignored.
+
+    Args:
+      device_lib_path: A device-specific library path.
+      lib_offsets: An iterable of integer offsets within the corresponding
+        library that will be symbolized by future calls to FindSymbolInfo.
+    """
+    if device_lib_path in self._bad_device_lib_paths:
+      return
+
+    host_lib_path = self._host_lib_finder.Find(device_lib_path)
+    if not host_lib_path:
+      # NOTE: self._bad_device_lib_paths is used to print this warning only
+      #       once per bad library.
+      logging.warning('Could not find host library matching device path: %s',
+                      device_lib_path)
+      self._bad_device_lib_paths.add(device_lib_path)
+      return
+
+    self._host_resolver.AddLibraryOffsets(host_lib_path, lib_offsets)
+
+  def AddApkOffsets(self, device_apk_path, apk_offsets, apk_translator):
+    """Associate a set of wanted offsets to a given device APK path.
+
+    This converts the APK-relative offsets into offsets relative to the
+    uncompressed libraries it contains, then calls AddLibraryOffsets()
+    for each one of the libraries.
+
+    Must be called before FindSymbolInfo() as well, otherwise input arguments
+    will be ignored.
+
+    Args:
+      device_apk_path: Device-specific APK path.
+      apk_offsets: Iterable of offsets within the APK file.
+      apk_translator: An ApkLibraryPathTranslator instance used to extract
+        library paths from the APK.
+    """
+    libraries_map = collections.defaultdict(set)
+    for offset in apk_offsets:
+      lib_path, lib_offset = apk_translator.TranslatePath(device_apk_path,
+                                                          offset)
+      libraries_map[lib_path].add(lib_offset)
+
+    for lib_path, lib_offsets in libraries_map.iteritems():
+      self.AddLibraryOffsets(lib_path, lib_offsets)
+
+  def FindSymbolInfo(self, device_path, offset):
+    """Overrides SymbolResolver.FindSymbolInfo.
+
+    Args:
+      device_path: Device-specific library path (e.g.
+        '/data/app/com.example.app-1/lib/x86/libfoo.so')
+      offset: Offset in device library path.
+    Returns:
+      Corresponding symbol information string, or None.
+    """
+    host_path = self._host_lib_finder.Find(device_path)
+    if not host_path:
+      return None
+
+    return self._host_resolver.FindSymbolInfo(host_path, offset)
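+
+
+# Illustration only: wiring a DeviceSymbolResolver on top of an
+# ElfSymbolResolver and a HostLibraryFinder, as described in the class
+# docstring. All literal paths and the ABI are assumptions.
+def _ExampleDeviceSymbolResolverUsage(device_lib_path, offsets):
+  lib_finder = HostLibraryFinder()
+  lib_finder.AddSearchDir('/path/to/out/Release/lib.unstripped')
+  resolver = DeviceSymbolResolver(ElfSymbolResolver(), lib_finder)
+  resolver.SetAndroidAbi('armeabi-v7a')
+  resolver.AddLibraryOffsets(device_lib_path, offsets)
+  return [resolver.FindSymbolInfo(device_lib_path, offset)
+          for offset in offsets]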
+
+
+class MemoryMap(object):
+  """Models the memory map of a given process. Usage is:
+
+    1) Create new instance, passing the Android ABI.
+
+    2) Call TranslateLine() whenever you want to detect and translate any
+       memory map input line.
+
+    3) Otherwise, it is possible to parse the whole memory map input with
+       ParseLines(), then call FindSectionForAddress() repeatedly in order
+       to translate a memory address into the corresponding mapping and
+       file information tuple (e.g. to symbolize stack entries).
+  """
+
+  # A named tuple describing interesting memory map line items.
+  # Fields:
+  #   addr_start: Mapping start address in memory.
+  #   file_offset: Corresponding file offset.
+  #   file_size: Corresponding mapping size in bytes.
+  #   file_path: Input file path.
+  #   match: Corresponding regular expression match object.
+  LineTuple = collections.namedtuple('MemoryMapLineTuple',
+                                     'addr_start,file_offset,file_size,'
+                                     'file_path, match')
+
+  # A named tuple describing a memory map section.
+  # Fields:
+  #   address: Memory address.
+  #   size: Size in bytes in memory.
+  #   offset: Starting file offset.
+  #   path: Input file path.
+  SectionTuple = collections.namedtuple('MemoryMapSection',
+                                        'address,size,offset,path')
+
+  def __init__(self, android_abi):
+    """Initializes instance.
+
+    Args:
+      android_abi: Android CPU ABI name (e.g. 'armeabi-v7a')
+    """
+    hex_addr = _HexAddressRegexpFor(android_abi)
+
+    # pylint: disable=line-too-long
+    # A regular expression used to match memory map entries which look like:
+    #    b278c000-b2790fff r--   4fda000      5000  /data/app/com.google.android.apps.chrome-2/base.apk
+    # pylint: enable=line-too-long
+    self._re_map_section = re.compile(
+        r'\s*(?P<addr_start>' + hex_addr + r')-(?P<addr_end>' + hex_addr + ')' +
+        r'\s+' +
+        r'(?P<perm>...)\s+' +
+        r'(?P<file_offset>[0-9a-f]+)\s+' +
+        r'(?P<file_size>[0-9a-f]+)\s*' +
+        r'(?P<file_path>[^ \t]+)?')
+
+    self._addr_map = []  # Sorted list of (address, size, path, offset) tuples.
+    self._sorted_addresses = []  # Sorted list of address fields in _addr_map.
+    self._in_section = False
+
+  def TranslateLine(self, line, apk_path_translator):
+    """Try to translate a memory map input line, if detected.
+
+    This only takes care of converting mapped APK file path and offsets
+    into a corresponding uncompressed native library file path + new offsets,
+    e.g. '..... <offset> <size> /data/.../base.apk' gets
+    translated into '.... <new-offset> <size> /data/.../base.apk!lib/libfoo.so'
+
+    This function should always work, even if ParseLines() was not called
+    previously.
+
+    Args:
+      line: Input memory map / tombstone line.
+      apk_path_translator: An ApkLibraryPathTranslator instance, used to map
+        APK offsets into uncompressed native libraries + new offsets.
+    Returns:
+      Translated memory map line, if relevant, or unchanged input line
+      otherwise.
+    """
+    t = self._ParseLine(line.rstrip())
+    if not t:
+      return line
+
+    new_path, new_offset = apk_path_translator.TranslatePath(
+        t.file_path, t.file_offset)
+
+    if new_path == t.file_path:
+      return line
+
+    pos = t.match.start('file_path')
+    return '%s%s (offset 0x%x)%s' % (line[0:pos], new_path, new_offset,
+                                     line[t.match.end('file_path'):])
+
+  def ParseLines(self, input_lines, in_section=False):
+    """Parse a list of input lines and extract the APK memory map out of it.
+
+    Args:
+      input_lines: list, or iterable, of input lines.
+      in_section: Optional. If true, considers that the input lines are
+        already part of the memory map. Otherwise, waits until the start of
+        the section appears in the input before recording data.
+    Returns:
+      True iff APK-related memory map entries were found. False otherwise.
+    """
+    addr_list = []  # List of LineTuple instances for matching input lines.
+    self._in_section = in_section
+    for line in input_lines:
+      t = self._ParseLine(line.rstrip())
+      if not t:
+        continue
+
+      addr_list.append(t)
+
+    self._addr_map = sorted(addr_list, key=lambda x: x.addr_start)
+    self._sorted_addresses = [e.addr_start for e in self._addr_map]
+    return bool(self._addr_map)
+
+  def _ParseLine(self, line):
+    """Used internally to recognized memory map input lines.
+
+    Args:
+      line: Input logcat or tomstone line.
+    Returns:
+      A LineTuple instance on success, or None on failure.
+    """
+    if not self._in_section:
+      self._in_section = line.startswith('memory map:')
+      return None
+
+    m = self._re_map_section.match(line)
+    if not m:
+      self._in_section = False  # End of memory map section
+      return None
+
+    # Only accept .apk and .so files that are not from the system partitions.
+    file_path = m.group('file_path')
+    if not file_path:
+      return None
+
+    if file_path.startswith('/system') or file_path.startswith('/vendor'):
+      return None
+
+    if not (file_path.endswith('.apk') or file_path.endswith('.so')):
+      return None
+
+    addr_start = int(m.group('addr_start'), 16)
+    file_offset = int(m.group('file_offset'), 16)
+    file_size = int(m.group('file_size'), 16)
+
+    return self.LineTuple(addr_start, file_offset, file_size, file_path, m)
+
+  def Dump(self):
+    """Print memory map for debugging."""
+    print('MEMORY MAP [')
+    for t in self._addr_map:
+      print('[%08x-%08x %08x %08x %s]' %
+            (t.addr_start, t.addr_start + t.file_size, t.file_size,
+             t.file_offset, t.file_path))
+    print('] MEMORY MAP')
+
+  def FindSectionForAddress(self, addr):
+    """Find the map section corresponding to a specific memory address.
+
+    Call this method only after ParseLines() has been called to extract
+    the relevant information from the memory map.
+
+    Args:
+      addr: Memory address.
+    Returns:
+      A SectionTuple instance on success, or None on failure.
+    """
+    pos = bisect.bisect_right(self._sorted_addresses, addr)
+    if pos > 0:
+      # All values in [0,pos) are <= addr, just ensure that the last
+      # one contains the address as well.
+      entry = self._addr_map[pos - 1]
+      if entry.addr_start + entry.file_size > addr:
+        return self.SectionTuple(entry.addr_start, entry.file_size,
+                                 entry.file_offset, entry.file_path)
+    return None
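+
+
+# Illustration only: parse a whole memory map once, then map an address back
+# to its section, per the class docstring. |tombstone_lines|, |addr| and the
+# ABI are assumptions.
+def _ExampleMemoryMapUsage(tombstone_lines, addr):
+  memory_map = MemoryMap('armeabi-v7a')
+  if not memory_map.ParseLines(tombstone_lines):
+    return None
+  section = memory_map.FindSectionForAddress(addr)  # SectionTuple or None.
+  if section is None:
+    return None
+  # Convert the in-memory address into a file offset within |section.path|.
+  return (section.path, section.offset + (addr - section.address))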
+
+
+class BacktraceTranslator(object):
+  """Translates backtrace-related lines in a tombstone or crash report.
+
+  Usage is the following:
+    1) Create new instance with appropriate arguments.
+    2) If the tombstone / logcat input is available, one can call
+       FindLibraryOffsets() in order to detect which library offsets
+       will need to be symbolized during a future parse. Doing so helps
+       speed up the ELF symbolizer.
+    3) For each tombstone/logcat input line, call TranslateLine() to
+       try to detect and symbolize backtrace lines.
+  """
+
+  # A named tuple for relevant input backtrace lines.
+  # Fields:
+  #   rel_pc: Instruction pointer, relative to offset in library start.
+  #   location: Library or APK file path.
+  #   offset: Load base of executable code in library or apk file path.
+  #   match: The corresponding regular expression match object.
+  # Note:
+  #   The actual instruction pointer always matches the position at
+  #   |offset + rel_pc| in |location|.
+  LineTuple = collections.namedtuple('BacktraceLineTuple',
+                                      'rel_pc,location,offset,match')
+
+  def __init__(self, android_abi, apk_translator):
+    """Initialize instance.
+
+    Args:
+      android_abi: Android CPU ABI name (e.g. 'armeabi-v7a').
+      apk_translator: ApkLibraryPathTranslator instance used to convert
+        mapped APK file offsets into uncompressed library file paths with
+        new offsets.
+    """
+    hex_addr = _HexAddressRegexpFor(android_abi)
+
+    # A regular expression used to match backtrace lines.
+    self._re_backtrace = re.compile(
+        r'.*#(?P<frame>[0-9]{2})\s+' +
+        r'(..)\s+' +
+        r'(?P<rel_pc>' + hex_addr + r')\s+' +
+        r'(?P<location>[^ \t]+)' +
+        r'(\s+\(offset 0x(?P<offset>[0-9a-f]+)\))?')
+
+    # In certain cases, offset will be provided as <location>+0x<offset>
+    # instead of <location> (offset 0x<offset>). This is a regexp to detect
+    # this.
+    self._re_location_offset = re.compile(
+        r'.*\+0x(?P<offset>[0-9a-f]+)$')
+
+    self._apk_translator = apk_translator
+    self._in_section = False
+
+  def _ParseLine(self, line):
+    """Used internally to detect and decompose backtrace input lines.
+
+    Args:
+      line: input tombstone line.
+    Returns:
+      A LineTuple instance on success, None on failure.
+    """
+    if not self._in_section:
+      self._in_section = line.startswith('backtrace:')
+      return None
+
+    line = line.rstrip()
+    m = self._re_backtrace.match(line)
+    if not m:
+      self._in_section = False
+      return None
+
+    location = m.group('location')
+    offset = m.group('offset')
+    if not offset:
+      m2 = self._re_location_offset.match(location)
+      if m2:
+        offset = m2.group('offset')
+        location = location[0:m2.start('offset') - 3]
+
+    if not offset:
+      return None
+
+    offset = int(offset, 16)
+    rel_pc = int(m.group('rel_pc'), 16)
+
+    # Two cases to consider here:
+    #
+    # * If this is a library file directly mapped in memory, then |rel_pc|
+    #   is the direct offset within the library, and doesn't need any kind
+    #   of adjustment.
+    #
+    # * If this is a library mapped directly from an .apk file, then
+    #   |rel_pc| is the offset in the APK, and |offset| happens to be the
+    #   load base of the corresponding library.
+    #
+    if location.endswith('.so'):
+      # For a native library directly mapped from the file system,
+      return self.LineTuple(rel_pc, location, offset, m)
+
+    if location.endswith('.apk'):
+      # For a native library inside a memory-mapped APK file,
+      new_location, new_offset = self._apk_translator.TranslatePath(
+          location, offset)
+
+      return self.LineTuple(rel_pc, new_location, new_offset, m)
+
+    # Ignore anything else (e.g. .oat or .odex files).
+    return None
+
+  def FindLibraryOffsets(self, input_lines, in_section=False):
+    """Parse a tombstone's backtrace section and find all library offsets in it.
+
+    Args:
+      input_lines: List or iterable of input tombstone lines.
+      in_section: Optional. If True, considers that the stack section has
+        already started.
+    Returns:
+      A dictionary mapping device library paths to sets of offsets within
+      them.
+    """
+    self._in_section = in_section
+    result = collections.defaultdict(set)
+    for line in input_lines:
+      t = self._ParseLine(line)
+      if not t:
+        continue
+
+      result[t.location].add(t.offset + t.rel_pc)
+    return result
+
+  def TranslateLine(self, line, symbol_resolver):
+    """Symbolize backtrace line if recognized.
+
+    Args:
+      line: input backtrace line.
+      symbol_resolver: symbol resolver instance to use. This method will
+        call its FindSymbolInfo(device_lib_path, lib_offset) method to
+        convert offsets into symbol information strings.
+    Returns:
+      Translated line (unchanged if not recognized as a backtrace).
+    """
+    t = self._ParseLine(line)
+    if not t:
+      return line
+
+    symbol_info = symbol_resolver.FindSymbolInfo(t.location,
+                                                 t.offset + t.rel_pc)
+    if not symbol_info:
+      symbol_info = 'offset 0x%x' % t.offset
+
+    pos = t.match.start('location')
+    pos2 = t.match.end('offset') + 1
+    if pos2 <= 0:
+      pos2 = t.match.end('location')
+    return '%s%s (%s)%s' % (line[:pos], t.location, symbol_info, line[pos2:])
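+
+
+# Illustration only: the two-pass flow suggested by the class docstring.
+# First pre-register all backtrace offsets with the resolver (so addr2line
+# runs once over sorted inputs), then translate line by line.
+# |apk_translator| is assumed to be an ApkLibraryPathTranslator instance and
+# |symbol_resolver| a SymbolResolver; the ABI value is an assumption.
+def _ExampleBacktraceTranslation(tombstone_lines, apk_translator,
+                                 symbol_resolver):
+  translator = BacktraceTranslator('armeabi-v7a', apk_translator)
+  for lib_path, offsets in translator.FindLibraryOffsets(
+      tombstone_lines).iteritems():
+    symbol_resolver.AddLibraryOffsets(lib_path, offsets)
+  return [translator.TranslateLine(line, symbol_resolver)
+          for line in tombstone_lines]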
+
+
+class StackTranslator(object):
+  """Translates stack-related lines in a tombstone or crash report."""
+
+  # A named tuple describing relevant stack input lines.
+  # Fields:
+  #   address: Address as it appears in the stack.
+  #   lib_path: Library path where |address| is mapped.
+  #   lib_offset: Library load base offset for |lib_path|.
+  #   match: Corresponding regular expression match object.
+  LineTuple = collections.namedtuple('StackLineTuple',
+                                     'address, lib_path, lib_offset, match')
+
+  def __init__(self, android_abi, memory_map, apk_translator):
+    """Initialize instance."""
+    hex_addr = _HexAddressRegexpFor(android_abi)
+
+    # pylint: disable=line-too-long
+    # A regular expression used to recognize stack entries like:
+    #
+    #    #05  bf89a180  bf89a1e4  [stack]
+    #         bf89a1c8  a0c01c51  /data/app/com.google.android.apps.chrome-2/base.apk
+    #         bf89a080  00000000
+    #         ........  ........
+    # pylint: enable=line-too-long
+    self._re_stack_line = re.compile(
+        r'\s+(?P<frame_number>#[0-9]+)?\s*' +
+        r'(?P<stack_addr>' + hex_addr + r')\s+' +
+        r'(?P<stack_value>' + hex_addr + r')' +
+        r'(\s+(?P<location>[^ \t]+))?')
+
+    self._re_stack_abbrev = re.compile(r'\s+[.]+\s+[.]+')
+
+    self._memory_map = memory_map
+    self._apk_translator = apk_translator
+    self._in_section = False
+
+  def _ParseLine(self, line):
+    """Check a given input line for a relevant _re_stack_line match.
+
+    Args:
+      line: input tombstone line.
+    Returns:
+      A LineTuple instance on success, None on failure.
+    """
+    line = line.rstrip()
+    if not self._in_section:
+      self._in_section = line.startswith('stack:')
+      return None
+
+    m = self._re_stack_line.match(line)
+    if not m:
+      if not self._re_stack_abbrev.match(line):
+        self._in_section = False
+      return None
+
+    location = m.group('location')
+    if not location:
+      return None
+
+    if not location.endswith('.apk') and not location.endswith('.so'):
+      return None
+
+    addr = int(m.group('stack_value'), 16)
+    t = self._memory_map.FindSectionForAddress(addr)
+    if t is None:
+      return None
+
+    lib_path = t.path
+    lib_offset = t.offset + (addr - t.address)
+
+    if lib_path.endswith('.apk'):
+      lib_path, lib_offset = self._apk_translator.TranslatePath(
+          lib_path, lib_offset)
+
+    return self.LineTuple(addr, lib_path, lib_offset, m)
+
+  def FindLibraryOffsets(self, input_lines, in_section=False):
+    """Parse a tombstone's stack section and find all library offsets in it.
+
+    Args:
+      input_lines: List or iterable of input tombstone lines.
+      in_section: Optional. If True, considers that the stack section has
+        already started.
+    Returns:
+      A dictionary mapping device library paths to sets of offsets within
+      them.
+    """
+    result = collections.defaultdict(set)
+    self._in_section = in_section
+    for line in input_lines:
+      t = self._ParseLine(line)
+      if t:
+        result[t.lib_path].add(t.lib_offset)
+    return result
+
+  def TranslateLine(self, line, symbol_resolver=None):
+    """Try to translate a line of the stack dump."""
+    t = self._ParseLine(line)
+    if not t:
+      return line
+
+    symbol_info = symbol_resolver.FindSymbolInfo(t.lib_path, t.lib_offset)
+    if not symbol_info:
+      return line
+
+    pos = t.match.start('location')
+    pos2 = t.match.end('location')
+    return '%s%s (%s)%s' % (line[:pos], t.lib_path, symbol_info, line[pos2:])
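+
+
+# Illustration only: StackTranslator mirrors BacktraceTranslator but needs a
+# parsed MemoryMap to resolve stack values. All arguments and the ABI value
+# are assumptions for the sketch.
+def _ExampleStackTranslation(tombstone_lines, memory_map, apk_translator,
+                             symbol_resolver):
+  translator = StackTranslator('armeabi-v7a', memory_map, apk_translator)
+  return [translator.TranslateLine(line, symbol_resolver)
+          for line in tombstone_lines]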
diff --git a/src/build/android/pylib/symbols/symbol_utils_unittest.py b/src/build/android/pylib/symbols/symbol_utils_unittest.py
new file mode 100644
index 0000000..ed87f9e
--- /dev/null
+++ b/src/build/android/pylib/symbols/symbol_utils_unittest.py
@@ -0,0 +1,942 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import contextlib
+import logging
+import os
+import re
+import shutil
+import tempfile
+import unittest
+
+from pylib.symbols import apk_native_libs_unittest
+from pylib.symbols import mock_addr2line
+from pylib.symbols import symbol_utils
+
+_MOCK_ELF_DATA = apk_native_libs_unittest.MOCK_ELF_DATA
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+                              'mock_addr2line')
+
+
+# pylint: disable=line-too-long
+
+# list of (start_offset, end_offset, size, libpath) tuples corresponding
+# to the content of base.apk. This was taken from an x86 ChromeModern.apk
+# component build.
+_TEST_APK_LIBS = [
+  (0x01331000, 0x013696bc, 0x000386bc, 'libaccessibility.cr.so'),
+  (0x0136a000, 0x013779c4, 0x0000d9c4, 'libanimation.cr.so'),
+  (0x01378000, 0x0137f7e8, 0x000077e8, 'libapdu.cr.so'),
+  (0x01380000, 0x0155ccc8, 0x001dccc8, 'libbase.cr.so'),
+  (0x0155d000, 0x015ab98c, 0x0004e98c, 'libbase_i18n.cr.so'),
+  (0x015ac000, 0x015dff4c, 0x00033f4c, 'libbindings.cr.so'),
+  (0x015e0000, 0x015f5a54, 0x00015a54, 'libbindings_base.cr.so'),
+  (0x0160e000, 0x01731960, 0x00123960, 'libblink_common.cr.so'),
+  (0x01732000, 0x0174ce54, 0x0001ae54, 'libblink_controller.cr.so'),
+  (0x0174d000, 0x0318c528, 0x01a3f528, 'libblink_core.cr.so'),
+  (0x0318d000, 0x03191700, 0x00004700, 'libblink_mojom_broadcastchannel_bindings_shared.cr.so'),
+  (0x03192000, 0x03cd7918, 0x00b45918, 'libblink_modules.cr.so'),
+  (0x03cd8000, 0x03d137d0, 0x0003b7d0, 'libblink_mojo_bindings_shared.cr.so'),
+  (0x03d14000, 0x03d2670c, 0x0001270c, 'libblink_offscreen_canvas_mojo_bindings_shared.cr.so'),
+  (0x03d27000, 0x046c7054, 0x009a0054, 'libblink_platform.cr.so'),
+  (0x046c8000, 0x0473fbfc, 0x00077bfc, 'libbluetooth.cr.so'),
+  (0x04740000, 0x04878f40, 0x00138f40, 'libboringssl.cr.so'),
+  (0x04879000, 0x0498466c, 0x0010b66c, 'libc++_shared.so'),
+  (0x04985000, 0x0498d93c, 0x0000893c, 'libcaptive_portal.cr.so'),
+  (0x0498e000, 0x049947cc, 0x000067cc, 'libcapture_base.cr.so'),
+  (0x04995000, 0x04b39f18, 0x001a4f18, 'libcapture_lib.cr.so'),
+  (0x04b3a000, 0x04b488ec, 0x0000e8ec, 'libcbor.cr.so'),
+  (0x04b49000, 0x04e9ea5c, 0x00355a5c, 'libcc.cr.so'),
+  (0x04e9f000, 0x04ed6404, 0x00037404, 'libcc_animation.cr.so'),
+  (0x04ed7000, 0x04ef5ab4, 0x0001eab4, 'libcc_base.cr.so'),
+  (0x04ef6000, 0x04fd9364, 0x000e3364, 'libcc_blink.cr.so'),
+  (0x04fda000, 0x04fe2758, 0x00008758, 'libcc_debug.cr.so'),
+  (0x04fe3000, 0x0500ae0c, 0x00027e0c, 'libcc_ipc.cr.so'),
+  (0x0500b000, 0x05078f38, 0x0006df38, 'libcc_paint.cr.so'),
+  (0x05079000, 0x0507e734, 0x00005734, 'libcdm_manager.cr.so'),
+  (0x0507f000, 0x06f4d744, 0x01ece744, 'libchrome.cr.so'),
+  (0x06f54000, 0x06feb830, 0x00097830, 'libchromium_sqlite3.cr.so'),
+  (0x06fec000, 0x0706f554, 0x00083554, 'libclient.cr.so'),
+  (0x07070000, 0x0708da60, 0x0001da60, 'libcloud_policy_proto_generated_compile.cr.so'),
+  (0x0708e000, 0x07121f28, 0x00093f28, 'libcodec.cr.so'),
+  (0x07122000, 0x07134ab8, 0x00012ab8, 'libcolor_space.cr.so'),
+  (0x07135000, 0x07138614, 0x00003614, 'libcommon.cr.so'),
+  (0x07139000, 0x0717c938, 0x00043938, 'libcompositor.cr.so'),
+  (0x0717d000, 0x0923d78c, 0x020c078c, 'libcontent.cr.so'),
+  (0x0923e000, 0x092ae87c, 0x0007087c, 'libcontent_common_mojo_bindings_shared.cr.so'),
+  (0x092af000, 0x092be718, 0x0000f718, 'libcontent_public_common_mojo_bindings_shared.cr.so'),
+  (0x092bf000, 0x092d9a20, 0x0001aa20, 'libcrash_key.cr.so'),
+  (0x092da000, 0x092eda58, 0x00013a58, 'libcrcrypto.cr.so'),
+  (0x092ee000, 0x092f16e0, 0x000036e0, 'libdevice_base.cr.so'),
+  (0x092f2000, 0x092fe8d8, 0x0000c8d8, 'libdevice_event_log.cr.so'),
+  (0x092ff000, 0x093026a4, 0x000036a4, 'libdevice_features.cr.so'),
+  (0x09303000, 0x093f1220, 0x000ee220, 'libdevice_gamepad.cr.so'),
+  (0x093f2000, 0x09437f54, 0x00045f54, 'libdevice_vr_mojo_bindings.cr.so'),
+  (0x09438000, 0x0954c168, 0x00114168, 'libdevice_vr_mojo_bindings_blink.cr.so'),
+  (0x0954d000, 0x0955d720, 0x00010720, 'libdevice_vr_mojo_bindings_shared.cr.so'),
+  (0x0955e000, 0x0956b9c0, 0x0000d9c0, 'libdevices.cr.so'),
+  (0x0956c000, 0x0957cae8, 0x00010ae8, 'libdiscardable_memory_client.cr.so'),
+  (0x0957d000, 0x09588854, 0x0000b854, 'libdiscardable_memory_common.cr.so'),
+  (0x09589000, 0x0959cbb4, 0x00013bb4, 'libdiscardable_memory_service.cr.so'),
+  (0x0959d000, 0x095b6b90, 0x00019b90, 'libdisplay.cr.so'),
+  (0x095b7000, 0x095be930, 0x00007930, 'libdisplay_types.cr.so'),
+  (0x095bf000, 0x095c46c4, 0x000056c4, 'libdisplay_util.cr.so'),
+  (0x095c5000, 0x095f54a4, 0x000304a4, 'libdomain_reliability.cr.so'),
+  (0x095f6000, 0x0966fe08, 0x00079e08, 'libembedder.cr.so'),
+  (0x09670000, 0x096735f8, 0x000035f8, 'libembedder_switches.cr.so'),
+  (0x09674000, 0x096a3460, 0x0002f460, 'libevents.cr.so'),
+  (0x096a4000, 0x096b6d40, 0x00012d40, 'libevents_base.cr.so'),
+  (0x096b7000, 0x0981a778, 0x00163778, 'libffmpeg.cr.so'),
+  (0x0981b000, 0x09945c94, 0x0012ac94, 'libfido.cr.so'),
+  (0x09946000, 0x09a330dc, 0x000ed0dc, 'libfingerprint.cr.so'),
+  (0x09a34000, 0x09b53170, 0x0011f170, 'libfreetype_harfbuzz.cr.so'),
+  (0x09b54000, 0x09bc5c5c, 0x00071c5c, 'libgcm.cr.so'),
+  (0x09bc6000, 0x09cc8584, 0x00102584, 'libgeolocation.cr.so'),
+  (0x09cc9000, 0x09cdc8d4, 0x000138d4, 'libgeometry.cr.so'),
+  (0x09cdd000, 0x09cec8b4, 0x0000f8b4, 'libgeometry_skia.cr.so'),
+  (0x09ced000, 0x09d10e14, 0x00023e14, 'libgesture_detection.cr.so'),
+  (0x09d11000, 0x09d7595c, 0x0006495c, 'libgfx.cr.so'),
+  (0x09d76000, 0x09d7d7cc, 0x000077cc, 'libgfx_ipc.cr.so'),
+  (0x09d7e000, 0x09d82708, 0x00004708, 'libgfx_ipc_buffer_types.cr.so'),
+  (0x09d83000, 0x09d89748, 0x00006748, 'libgfx_ipc_color.cr.so'),
+  (0x09d8a000, 0x09d8f6f4, 0x000056f4, 'libgfx_ipc_geometry.cr.so'),
+  (0x09d90000, 0x09d94754, 0x00004754, 'libgfx_ipc_skia.cr.so'),
+  (0x09d95000, 0x09d9869c, 0x0000369c, 'libgfx_switches.cr.so'),
+  (0x09d99000, 0x09dba0ac, 0x000210ac, 'libgin.cr.so'),
+  (0x09dbb000, 0x09e0a8cc, 0x0004f8cc, 'libgl_in_process_context.cr.so'),
+  (0x09e0b000, 0x09e17a18, 0x0000ca18, 'libgl_init.cr.so'),
+  (0x09e18000, 0x09ee34e4, 0x000cb4e4, 'libgl_wrapper.cr.so'),
+  (0x09ee4000, 0x0a1a2e00, 0x002bee00, 'libgles2.cr.so'),
+  (0x0a1a3000, 0x0a24556c, 0x000a256c, 'libgles2_implementation.cr.so'),
+  (0x0a246000, 0x0a267038, 0x00021038, 'libgles2_utils.cr.so'),
+  (0x0a268000, 0x0a3288e4, 0x000c08e4, 'libgpu.cr.so'),
+  (0x0a329000, 0x0a3627ec, 0x000397ec, 'libgpu_ipc_service.cr.so'),
+  (0x0a363000, 0x0a388a18, 0x00025a18, 'libgpu_util.cr.so'),
+  (0x0a389000, 0x0a506d8c, 0x0017dd8c, 'libhost.cr.so'),
+  (0x0a507000, 0x0a6f0ec0, 0x001e9ec0, 'libicui18n.cr.so'),
+  (0x0a6f1000, 0x0a83b4c8, 0x0014a4c8, 'libicuuc.cr.so'),
+  (0x0a83c000, 0x0a8416e4, 0x000056e4, 'libinterfaces_shared.cr.so'),
+  (0x0a842000, 0x0a87e2a0, 0x0003c2a0, 'libipc.cr.so'),
+  (0x0a87f000, 0x0a88c98c, 0x0000d98c, 'libipc_mojom.cr.so'),
+  (0x0a88d000, 0x0a8926e4, 0x000056e4, 'libipc_mojom_shared.cr.so'),
+  (0x0a893000, 0x0a8a1e18, 0x0000ee18, 'libkeyed_service_content.cr.so'),
+  (0x0a8a2000, 0x0a8b4a30, 0x00012a30, 'libkeyed_service_core.cr.so'),
+  (0x0a8b5000, 0x0a930a80, 0x0007ba80, 'libleveldatabase.cr.so'),
+  (0x0a931000, 0x0a9b3908, 0x00082908, 'libmanager.cr.so'),
+  (0x0a9b4000, 0x0aea9bb4, 0x004f5bb4, 'libmedia.cr.so'),
+  (0x0aeaa000, 0x0b08cb88, 0x001e2b88, 'libmedia_blink.cr.so'),
+  (0x0b08d000, 0x0b0a4728, 0x00017728, 'libmedia_devices_mojo_bindings_shared.cr.so'),
+  (0x0b0a5000, 0x0b1943ec, 0x000ef3ec, 'libmedia_gpu.cr.so'),
+  (0x0b195000, 0x0b2d07d4, 0x0013b7d4, 'libmedia_mojo_services.cr.so'),
+  (0x0b2d1000, 0x0b2d4760, 0x00003760, 'libmessage_center.cr.so'),
+  (0x0b2d5000, 0x0b2e0938, 0x0000b938, 'libmessage_support.cr.so'),
+  (0x0b2e1000, 0x0b2f3ad0, 0x00012ad0, 'libmetrics_cpp.cr.so'),
+  (0x0b2f4000, 0x0b313bb8, 0x0001fbb8, 'libmidi.cr.so'),
+  (0x0b314000, 0x0b31b848, 0x00007848, 'libmojo_base_lib.cr.so'),
+  (0x0b31c000, 0x0b3329f8, 0x000169f8, 'libmojo_base_mojom.cr.so'),
+  (0x0b333000, 0x0b34b98c, 0x0001898c, 'libmojo_base_mojom_blink.cr.so'),
+  (0x0b34c000, 0x0b354700, 0x00008700, 'libmojo_base_mojom_shared.cr.so'),
+  (0x0b355000, 0x0b3608b0, 0x0000b8b0, 'libmojo_base_shared_typemap_traits.cr.so'),
+  (0x0b361000, 0x0b3ad454, 0x0004c454, 'libmojo_edk.cr.so'),
+  (0x0b3ae000, 0x0b3c4a20, 0x00016a20, 'libmojo_edk_ports.cr.so'),
+  (0x0b3c5000, 0x0b3d38a0, 0x0000e8a0, 'libmojo_mojom_bindings.cr.so'),
+  (0x0b3d4000, 0x0b3da6e8, 0x000066e8, 'libmojo_mojom_bindings_shared.cr.so'),
+  (0x0b3db000, 0x0b3e27f0, 0x000077f0, 'libmojo_public_system.cr.so'),
+  (0x0b3e3000, 0x0b3fa9fc, 0x000179fc, 'libmojo_public_system_cpp.cr.so'),
+  (0x0b3fb000, 0x0b407728, 0x0000c728, 'libmojom_core_shared.cr.so'),
+  (0x0b408000, 0x0b421744, 0x00019744, 'libmojom_platform_shared.cr.so'),
+  (0x0b422000, 0x0b43451c, 0x0001251c, 'libnative_theme.cr.so'),
+  (0x0b435000, 0x0baaa1bc, 0x006751bc, 'libnet.cr.so'),
+  (0x0bac4000, 0x0bb74670, 0x000b0670, 'libnetwork_cpp.cr.so'),
+  (0x0bb75000, 0x0bbaee8c, 0x00039e8c, 'libnetwork_cpp_base.cr.so'),
+  (0x0bbaf000, 0x0bd21844, 0x00172844, 'libnetwork_service.cr.so'),
+  (0x0bd22000, 0x0bd256e4, 0x000036e4, 'libnetwork_session_configurator.cr.so'),
+  (0x0bd26000, 0x0bd33734, 0x0000d734, 'libonc.cr.so'),
+  (0x0bd34000, 0x0bd9ce18, 0x00068e18, 'libperfetto.cr.so'),
+  (0x0bd9d000, 0x0bda4854, 0x00007854, 'libplatform.cr.so'),
+  (0x0bda5000, 0x0bec5ce4, 0x00120ce4, 'libpolicy_component.cr.so'),
+  (0x0bec6000, 0x0bf5ab58, 0x00094b58, 'libpolicy_proto.cr.so'),
+  (0x0bf5b000, 0x0bf86fbc, 0x0002bfbc, 'libprefs.cr.so'),
+  (0x0bf87000, 0x0bfa5d74, 0x0001ed74, 'libprinting.cr.so'),
+  (0x0bfa6000, 0x0bfe0e80, 0x0003ae80, 'libprotobuf_lite.cr.so'),
+  (0x0bfe1000, 0x0bff0a18, 0x0000fa18, 'libproxy_config.cr.so'),
+  (0x0bff1000, 0x0c0f6654, 0x00105654, 'libpublic.cr.so'),
+  (0x0c0f7000, 0x0c0fa6a4, 0x000036a4, 'librange.cr.so'),
+  (0x0c0fb000, 0x0c118058, 0x0001d058, 'libraster.cr.so'),
+  (0x0c119000, 0x0c133d00, 0x0001ad00, 'libresource_coordinator_cpp.cr.so'),
+  (0x0c134000, 0x0c1396a0, 0x000056a0, 'libresource_coordinator_cpp_base.cr.so'),
+  (0x0c13a000, 0x0c1973b8, 0x0005d3b8, 'libresource_coordinator_public_mojom.cr.so'),
+  (0x0c198000, 0x0c2033e8, 0x0006b3e8, 'libresource_coordinator_public_mojom_blink.cr.so'),
+  (0x0c204000, 0x0c219744, 0x00015744, 'libresource_coordinator_public_mojom_shared.cr.so'),
+  (0x0c21a000, 0x0c21e700, 0x00004700, 'libsandbox.cr.so'),
+  (0x0c21f000, 0x0c22f96c, 0x0001096c, 'libsandbox_services.cr.so'),
+  (0x0c230000, 0x0c249d58, 0x00019d58, 'libseccomp_bpf.cr.so'),
+  (0x0c24a000, 0x0c24e714, 0x00004714, 'libseccomp_starter_android.cr.so'),
+  (0x0c24f000, 0x0c4ae9f0, 0x0025f9f0, 'libservice.cr.so'),
+  (0x0c4af000, 0x0c4c3ae4, 0x00014ae4, 'libservice_manager_cpp.cr.so'),
+  (0x0c4c4000, 0x0c4cb708, 0x00007708, 'libservice_manager_cpp_types.cr.so'),
+  (0x0c4cc000, 0x0c4fbe30, 0x0002fe30, 'libservice_manager_mojom.cr.so'),
+  (0x0c4fc000, 0x0c532e78, 0x00036e78, 'libservice_manager_mojom_blink.cr.so'),
+  (0x0c533000, 0x0c53669c, 0x0000369c, 'libservice_manager_mojom_constants.cr.so'),
+  (0x0c537000, 0x0c53e85c, 0x0000785c, 'libservice_manager_mojom_constants_blink.cr.so'),
+  (0x0c53f000, 0x0c542668, 0x00003668, 'libservice_manager_mojom_constants_shared.cr.so'),
+  (0x0c543000, 0x0c54d700, 0x0000a700, 'libservice_manager_mojom_shared.cr.so'),
+  (0x0c54e000, 0x0c8fc6ec, 0x003ae6ec, 'libsessions.cr.so'),
+  (0x0c8fd000, 0x0c90a924, 0x0000d924, 'libshared_memory_support.cr.so'),
+  (0x0c90b000, 0x0c9148ec, 0x000098ec, 'libshell_dialogs.cr.so'),
+  (0x0c915000, 0x0cf8de70, 0x00678e70, 'libskia.cr.so'),
+  (0x0cf8e000, 0x0cf978bc, 0x000098bc, 'libsnapshot.cr.so'),
+  (0x0cf98000, 0x0cfb7d9c, 0x0001fd9c, 'libsql.cr.so'),
+  (0x0cfb8000, 0x0cfbe744, 0x00006744, 'libstartup_tracing.cr.so'),
+  (0x0cfbf000, 0x0d19b4e4, 0x001dc4e4, 'libstorage_browser.cr.so'),
+  (0x0d19c000, 0x0d2a773c, 0x0010b73c, 'libstorage_common.cr.so'),
+  (0x0d2a8000, 0x0d2ac6fc, 0x000046fc, 'libsurface.cr.so'),
+  (0x0d2ad000, 0x0d2baa98, 0x0000da98, 'libtracing.cr.so'),
+  (0x0d2bb000, 0x0d2f36b0, 0x000386b0, 'libtracing_cpp.cr.so'),
+  (0x0d2f4000, 0x0d326e70, 0x00032e70, 'libtracing_mojom.cr.so'),
+  (0x0d327000, 0x0d33270c, 0x0000b70c, 'libtracing_mojom_shared.cr.so'),
+  (0x0d333000, 0x0d46d804, 0x0013a804, 'libui_android.cr.so'),
+  (0x0d46e000, 0x0d4cb3f8, 0x0005d3f8, 'libui_base.cr.so'),
+  (0x0d4cc000, 0x0d4dbc40, 0x0000fc40, 'libui_base_ime.cr.so'),
+  (0x0d4dc000, 0x0d4e58d4, 0x000098d4, 'libui_data_pack.cr.so'),
+  (0x0d4e6000, 0x0d51d1e0, 0x000371e0, 'libui_devtools.cr.so'),
+  (0x0d51e000, 0x0d52b984, 0x0000d984, 'libui_message_center_cpp.cr.so'),
+  (0x0d52c000, 0x0d539a48, 0x0000da48, 'libui_touch_selection.cr.so'),
+  (0x0d53a000, 0x0d55bc60, 0x00021c60, 'liburl.cr.so'),
+  (0x0d55c000, 0x0d55f6b4, 0x000036b4, 'liburl_ipc.cr.so'),
+  (0x0d560000, 0x0d5af110, 0x0004f110, 'liburl_matcher.cr.so'),
+  (0x0d5b0000, 0x0d5e2fac, 0x00032fac, 'libuser_manager.cr.so'),
+  (0x0d5e3000, 0x0d5e66e4, 0x000036e4, 'libuser_prefs.cr.so'),
+  (0x0d5e7000, 0x0e3e1cc8, 0x00dfacc8, 'libv8.cr.so'),
+  (0x0e3e2000, 0x0e400ae0, 0x0001eae0, 'libv8_libbase.cr.so'),
+  (0x0e401000, 0x0e4d91d4, 0x000d81d4, 'libviz_common.cr.so'),
+  (0x0e4da000, 0x0e4df7e4, 0x000057e4, 'libviz_resource_format.cr.so'),
+  (0x0e4e0000, 0x0e5b7120, 0x000d7120, 'libweb_dialogs.cr.so'),
+  (0x0e5b8000, 0x0e5c7a18, 0x0000fa18, 'libwebdata_common.cr.so'),
+  (0x0e5c8000, 0x0e61bfe4, 0x00053fe4, 'libwtf.cr.so'),
+]
+
+
+# A small memory map fragment extracted from a tombstone for a process that
+# had loaded the APK corresponding to _TEST_APK_LIBS above.
+_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw-         0     cb000  /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw-     cb000    400000  /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff ---    4cb000  1fb35000  /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw-         0      1000  /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff ---      1000  1ffff000  /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw-         0    9d9000  /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r--         0   1eb2000  /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x   1eb2000   1cfc000  /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw-   3bae000      1000  /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw-         0     dc000  /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw-         0      1000  /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw-      1000     11000  /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff ---     12000   3b13000  /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw-   3b25000    3ff000  /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r--    8a9000    18f000  /data/app/com.example.app-2/base.apk
+92539000-9255bfff r--         0     23000  /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r--    213000     38000  /data/app/com.example.app-2/base.apk
+92594000-925c0fff r--    87d000     2d000  /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r--    a37000    213000  /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r--    24a000    634000  /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r--   a931000     30000  /data/app/com.example.app-2/base.apk
+92e38000-92e86fff r-x   a961000     4f000  /data/app/com.example.app-2/base.apk
+92e87000-92e8afff rw-   a9b0000      4000  /data/app/com.example.app-2/base.apk
+92e8b000-92e8bfff rw-         0      1000
+92e8c000-92e9dfff r--   d5b0000     12000  /data/app/com.example.app-2/base.apk
+92e9e000-92ebcfff r-x   d5c2000     1f000  /data/app/com.example.app-2/base.apk
+92ebd000-92ebefff rw-   d5e1000      2000  /data/app/com.example.app-2/base.apk
+92ebf000-92ebffff rw-         0      1000
+'''
+
+  # List of (address, size, path, offset) tuples that must appear in
+  # _TEST_MEMORY_MAP. Not all sections need to be listed.
+_TEST_MEMORY_MAP_SECTIONS = [
+  (0x923aa000, 0x18f000, '/data/app/com.example.app-2/base.apk', 0x8a9000),
+  (0x9255c000, 0x038000, '/data/app/com.example.app-2/base.apk', 0x213000),
+  (0x92594000, 0x02d000, '/data/app/com.example.app-2/base.apk', 0x87d000),
+  (0x925c1000, 0x213000, '/data/app/com.example.app-2/base.apk', 0xa37000),
+]
+
+_EXPECTED_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw-         0     cb000  /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw-     cb000    400000  /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff ---    4cb000  1fb35000  /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw-         0      1000  /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff ---      1000  1ffff000  /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw-         0    9d9000  /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r--         0   1eb2000  /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x   1eb2000   1cfc000  /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw-   3bae000      1000  /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw-         0     dc000  /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw-         0      1000  /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw-      1000     11000  /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff ---     12000   3b13000  /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw-   3b25000    3ff000  /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r--    8a9000    18f000  /data/app/com.example.app-2/base.apk
+92539000-9255bfff r--         0     23000  /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r--    213000     38000  /data/app/com.example.app-2/base.apk
+92594000-925c0fff r--    87d000     2d000  /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r--    a37000    213000  /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r--    24a000    634000  /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r--   a931000     30000  /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x0)
+92e38000-92e86fff r-x   a961000     4f000  /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x30000)
+92e87000-92e8afff rw-   a9b0000      4000  /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x7f000)
+92e8b000-92e8bfff rw-         0      1000
+92e8c000-92e9dfff r--   d5b0000     12000  /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x0)
+92e9e000-92ebcfff r-x   d5c2000     1f000  /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x12000)
+92ebd000-92ebefff rw-   d5e1000      2000  /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x31000)
+92ebf000-92ebffff rw-         0      1000
+'''
+
+# Example stack section, taken from the same tombstone that _TEST_MEMORY_MAP
+# was extracted from.
+_TEST_STACK = r'''stack:
+        bf89a070  b7439468  /system/lib/libc.so
+        bf89a074  bf89a1e4  [stack]
+        bf89a078  932d4000  /data/app/com.example.app-2/base.apk
+        bf89a07c  b73bfbc9  /system/lib/libc.so (pthread_mutex_lock+65)
+        bf89a080  00000000
+        bf89a084  4000671c  /dev/ashmem/dalvik-main space 1 (deleted)
+        bf89a088  932d1d86  /data/app/com.example.app-2/base.apk
+        bf89a08c  b743671c  /system/lib/libc.so
+        bf89a090  b77f8c00  /system/bin/linker
+        bf89a094  b743cc90
+        bf89a098  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a09c  b73bf271  /system/lib/libc.so (__pthread_internal_find(long)+65)
+        bf89a0a0  b743cc90
+        bf89a0a4  bf89a0b0  [stack]
+        bf89a0a8  bf89a0b8  [stack]
+        bf89a0ac  00000008
+        ........  ........
+  #00  bf89a0b0  00000006
+        bf89a0b4  00000002
+        bf89a0b8  b743671c  /system/lib/libc.so
+        bf89a0bc  b73bf5d9  /system/lib/libc.so (pthread_kill+71)
+  #01  bf89a0c0  00006937
+        bf89a0c4  00006937
+        bf89a0c8  00000006
+        bf89a0cc  b77fd3a9  /system/bin/app_process32 (sigprocmask+141)
+        bf89a0d0  00000002
+        bf89a0d4  bf89a0ec  [stack]
+        bf89a0d8  00000000
+        bf89a0dc  b743671c  /system/lib/libc.so
+        bf89a0e0  bf89a12c  [stack]
+        bf89a0e4  bf89a1e4  [stack]
+        bf89a0e8  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a0ec  b7365206  /system/lib/libc.so (raise+37)
+  #02  bf89a0f0  b77f8c00  /system/bin/linker
+        bf89a0f4  00000006
+        bf89a0f8  b7439468  /system/lib/libc.so
+        bf89a0fc  b743671c  /system/lib/libc.so
+        bf89a100  bf89a12c  [stack]
+        bf89a104  b743671c  /system/lib/libc.so
+        bf89a108  bf89a12c  [stack]
+        bf89a10c  b735e9e5  /system/lib/libc.so (abort+81)
+  #03  bf89a110  00000006
+        bf89a114  bf89a12c  [stack]
+        bf89a118  00000000
+        bf89a11c  b55a3d3b  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
+        bf89a120  b7439468  /system/lib/libc.so
+        bf89a124  b55ba38d  /system/lib/libprotobuf-cpp-lite.so
+        bf89a128  b55ba408  /system/lib/libprotobuf-cpp-lite.so
+        bf89a12c  ffffffdf
+        bf89a130  0000003d
+        bf89a134  adfedf00  [anon:libc_malloc]
+        bf89a138  bf89a158  [stack]
+  #04  bf89a13c  a0cee7f0  /data/app/com.example.app-2/base.apk
+        bf89a140  b55c1cb0  /system/lib/libprotobuf-cpp-lite.so
+        bf89a144  bf89a1e4  [stack]
+'''
+
+# Expected value of _TEST_STACK after translation of addresses in the APK
+# into offsets into libraries.
+_EXPECTED_STACK = r'''stack:
+        bf89a070  b7439468  /system/lib/libc.so
+        bf89a074  bf89a1e4  [stack]
+        bf89a078  932d4000  /data/app/com.example.app-2/base.apk
+        bf89a07c  b73bfbc9  /system/lib/libc.so (pthread_mutex_lock+65)
+        bf89a080  00000000
+        bf89a084  4000671c  /dev/ashmem/dalvik-main space 1 (deleted)
+        bf89a088  932d1d86  /data/app/com.example.app-2/base.apk
+        bf89a08c  b743671c  /system/lib/libc.so
+        bf89a090  b77f8c00  /system/bin/linker
+        bf89a094  b743cc90
+        bf89a098  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a09c  b73bf271  /system/lib/libc.so (__pthread_internal_find(long)+65)
+        bf89a0a0  b743cc90
+        bf89a0a4  bf89a0b0  [stack]
+        bf89a0a8  bf89a0b8  [stack]
+        bf89a0ac  00000008
+        ........  ........
+  #00  bf89a0b0  00000006
+        bf89a0b4  00000002
+        bf89a0b8  b743671c  /system/lib/libc.so
+        bf89a0bc  b73bf5d9  /system/lib/libc.so (pthread_kill+71)
+  #01  bf89a0c0  00006937
+        bf89a0c4  00006937
+        bf89a0c8  00000006
+        bf89a0cc  b77fd3a9  /system/bin/app_process32 (sigprocmask+141)
+        bf89a0d0  00000002
+        bf89a0d4  bf89a0ec  [stack]
+        bf89a0d8  00000000
+        bf89a0dc  b743671c  /system/lib/libc.so
+        bf89a0e0  bf89a12c  [stack]
+        bf89a0e4  bf89a1e4  [stack]
+        bf89a0e8  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a0ec  b7365206  /system/lib/libc.so (raise+37)
+  #02  bf89a0f0  b77f8c00  /system/bin/linker
+        bf89a0f4  00000006
+        bf89a0f8  b7439468  /system/lib/libc.so
+        bf89a0fc  b743671c  /system/lib/libc.so
+        bf89a100  bf89a12c  [stack]
+        bf89a104  b743671c  /system/lib/libc.so
+        bf89a108  bf89a12c  [stack]
+        bf89a10c  b735e9e5  /system/lib/libc.so (abort+81)
+  #03  bf89a110  00000006
+        bf89a114  bf89a12c  [stack]
+        bf89a118  00000000
+        bf89a11c  b55a3d3b  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
+        bf89a120  b7439468  /system/lib/libc.so
+        bf89a124  b55ba38d  /system/lib/libprotobuf-cpp-lite.so
+        bf89a128  b55ba408  /system/lib/libprotobuf-cpp-lite.so
+        bf89a12c  ffffffdf
+        bf89a130  0000003d
+        bf89a134  adfedf00  [anon:libc_malloc]
+        bf89a138  bf89a158  [stack]
+  #04  bf89a13c  a0cee7f0  /data/app/com.example.app-2/base.apk
+        bf89a140  b55c1cb0  /system/lib/libprotobuf-cpp-lite.so
+        bf89a144  bf89a1e4  [stack]
+'''
+
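+# An example backtrace section in which frames that fall inside an APK are
+# reported as (base.apk, file offset) pairs rather than as library paths.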
+_TEST_BACKTRACE = r'''backtrace:
+    #00 pc 00084126  /system/lib/libc.so (tgkill+22)
+    #01 pc 000815d8  /system/lib/libc.so (pthread_kill+70)
+    #02 pc 00027205  /system/lib/libc.so (raise+36)
+    #03 pc 000209e4  /system/lib/libc.so (abort+80)
+    #04 pc 0000cf73  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
+    #05 pc 0000cf8e  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
+    #06 pc 0000d27f  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
+    #07 pc 007cd236  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #08 pc 000111a9  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+    #09 pc 00013228  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+    #10 pc 000131de  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+    #11 pc 007cd2d8  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #12 pc 007cd956  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #13 pc 007c2d4a  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #14 pc 009fc9f1  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #15 pc 009fc8ea  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #16 pc 00561c63  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #17 pc 0106fbdb  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #18 pc 004d7371  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #19 pc 004d8159  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #20 pc 004d7b96  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #21 pc 004da4b6  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #22 pc 005ab66c  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #23 pc 005afca2  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #24 pc 0000cae8  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #25 pc 00ce864f  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #26 pc 00ce8dfa  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #27 pc 00ce74c6  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #28 pc 00004616  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x961e000)
+    #29 pc 00ce8215  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #30 pc 0013d8c7  /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
+    #31 pc 00137c52  /system/lib/libart.so (art_quick_invoke_static_stub+418)
+    #32 pc 00143651  /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
+    #33 pc 005e06ae  /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
+    #34 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #35 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #36 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #37 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #38 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #39 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #40 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #41 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #42 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #43 pc 0032ebf9  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
+    #44 pc 000fc955  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
+    #45 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #46 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #47 pc 0033090c  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
+    #48 pc 000fc67f  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
+    #49 pc 00300700  /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
+    #50 pc 00667c73  /system/lib/libart.so (artQuickToInterpreterBridge+808)
+    #51 pc 0013d98d  /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
+    #52 pc 7264bc5b  /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
+'''
+
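+# Expected value of _TEST_BACKTRACE after translation of APK file offsets
+# into library paths and offsets.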
+_EXPECTED_BACKTRACE = r'''backtrace:
+    #00 pc 00084126  /system/lib/libc.so (tgkill+22)
+    #01 pc 000815d8  /system/lib/libc.so (pthread_kill+70)
+    #02 pc 00027205  /system/lib/libc.so (raise+36)
+    #03 pc 000209e4  /system/lib/libc.so (abort+80)
+    #04 pc 0000cf73  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
+    #05 pc 0000cf8e  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
+    #06 pc 0000d27f  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
+    #07 pc 007cd236  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #08 pc 000111a9  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+    #09 pc 00013228  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+    #10 pc 000131de  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+    #11 pc 007cd2d8  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #12 pc 007cd956  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #13 pc 007c2d4a  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #14 pc 009fc9f1  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #15 pc 009fc8ea  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #16 pc 00561c63  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #17 pc 0106fbdb  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #18 pc 004d7371  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #19 pc 004d8159  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #20 pc 004d7b96  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #21 pc 004da4b6  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #22 pc 005ab66c  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #23 pc 005afca2  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #24 pc 0000cae8  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #25 pc 00ce864f  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #26 pc 00ce8dfa  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #27 pc 00ce74c6  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #28 pc 00004616  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so (offset 0x28000)
+    #29 pc 00ce8215  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #30 pc 0013d8c7  /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
+    #31 pc 00137c52  /system/lib/libart.so (art_quick_invoke_static_stub+418)
+    #32 pc 00143651  /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
+    #33 pc 005e06ae  /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
+    #34 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #35 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #36 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #37 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #38 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #39 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #40 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #41 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #42 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #43 pc 0032ebf9  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
+    #44 pc 000fc955  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
+    #45 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #46 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #47 pc 0033090c  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
+    #48 pc 000fc67f  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
+    #49 pc 00300700  /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
+    #50 pc 00667c73  /system/lib/libart.so (artQuickToInterpreterBridge+808)
+    #51 pc 0013d98d  /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
+    #52 pc 7264bc5b  /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
+'''
+
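+# Expected result of BacktraceTranslator.FindLibraryOffsets() for the
+# translated backtrace above: each library path maps to the set of absolute
+# offsets (frame offset + relative pc) appearing in its frames.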
+_EXPECTED_BACKTRACE_OFFSETS_MAP = {
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so':
+      set([
+          0x1c000 + 0x111a9,
+          0x1c000 + 0x13228,
+          0x1c000 + 0x131de,
+      ]),
+
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so':
+      set([
+          0x90e000 + 0x7cd236,
+          0x90e000 + 0x7cd2d8,
+          0x90e000 + 0x7cd956,
+          0x90e000 + 0x7c2d4a,
+          0x90e000 + 0x9fc9f1,
+          0x90e000 + 0x9fc8ea,
+          0x90e000 + 0x561c63,
+          0x90e000 + 0x106fbdb,
+          0x90e000 + 0x4d7371,
+          0x90e000 + 0x4d8159,
+          0x90e000 + 0x4d7b96,
+          0x90e000 + 0x4da4b6,
+          0x90e000 + 0xcae8,
+      ]),
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so':
+      set([
+          0xc2d000 + 0x5ab66c,
+          0xc2d000 + 0x5afca2,
+          0xc2d000 + 0xce864f,
+          0xc2d000 + 0xce8dfa,
+          0xc2d000 + 0xce74c6,
+          0xc2d000 + 0xce8215,
+      ]),
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so':
+      set([
+          0x28000 + 0x4616,
+      ])
+}
+
+# pylint: enable=line-too-long
+
+_ONE_MB = 1024 * 1024
+_TEST_SYMBOL_DATA = {
+  # Regular symbols
+  0: 'mock_sym_for_addr_0 [mock_src/libmock1.so.c:0]',
+  0x1000: 'mock_sym_for_addr_4096 [mock_src/libmock1.so.c:4096]',
+
+  # Symbols without source file path.
+  _ONE_MB: 'mock_sym_for_addr_1048576 [??:0]',
+  _ONE_MB + 0x8234: 'mock_sym_for_addr_1081908 [??:0]',
+
+  # Unknown symbol.
+  2 * _ONE_MB: '?? [??:0]',
+
+  # Inlined symbol.
+  3 * _ONE_MB:
+    'mock_sym_for_addr_3145728_inner [mock_src/libmock1.so.c:3145728]',
+}
+
+@contextlib.contextmanager
+def _TempDir():
+  dirname = tempfile.mkdtemp()
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
+
+
+def _TouchFile(path):
+  # Create parent directories.
+  try:
+    os.makedirs(os.path.dirname(path))
+  except OSError:
+    pass
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+class MockApkTranslator(object):
+  """A mock ApkLibraryPathTranslator object used for testing."""
+
+  # Regex that matches the content of APK native library map files generated
+  # with apk_lib_dump.py.
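+  # An example line (values taken from _TEST_APK_LIBS):
+  #   0x0d5b0000 0x0d5e2fac 0x00032fac libuser_manager.cr.so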
+  _RE_MAP_FILE = re.compile(
+      r'0x(?P<file_start>[0-9a-f]+)\s+' +
+      r'0x(?P<file_end>[0-9a-f]+)\s+' +
+      r'0x(?P<file_size>[0-9a-f]+)\s+' +
+      r'(?P<lib_path>\S+)')
+
+  def __init__(self, test_apk_libs=None):
+    """Initialize instance.
+
+    Args:
+      test_apk_libs: Optional list of (file_start, file_end, size, lib_path)
+        tuples, like _TEST_APK_LIBS for example. This will be used to
+        implement TranslatePath().
+    """
+    self._apk_libs = []
+    if test_apk_libs:
+      self._AddLibEntries(test_apk_libs)
+
+  def _AddLibEntries(self, entries):
+    self._apk_libs = sorted(self._apk_libs + entries, key=lambda x: x[0])
+
+  def ReadMapFile(self, file_path):
+    """Read an .apk.native-libs file that was produced with apk_lib_dump.py.
+
+    Args:
+      file_path: input path to .apk.native-libs file. Its format is
+        essentially: 0x<start>  0x<end> 0x<size> <library-path>
+    """
+    new_libs = []
+    with open(file_path) as f:
+      for line in f.readlines():
+        m = MockApkTranslator._RE_MAP_FILE.match(line)
+        if m:
+          file_start = int(m.group('file_start'), 16)
+          file_end = int(m.group('file_end'), 16)
+          file_size = int(m.group('file_size'), 16)
+          lib_path = m.group('lib_path')
+          # Sanity check
+          if file_start + file_size != file_end:
+            logging.warning('%s: Inconsistent (start, end, size) values '
+                            '(0x%x, 0x%x, 0x%x)',
+                            file_path, file_start, file_end, file_size)
+          else:
+            new_libs.append((file_start, file_end, file_size, lib_path))
+
+    self._AddLibEntries(new_libs)
+
+  def TranslatePath(self, lib_path, lib_offset):
+    """Translate an APK file path + offset into a library path + offset."""
+    min_pos = 0
+    max_pos = len(self._apk_libs)
+    while min_pos < max_pos:
+      mid_pos = (min_pos + max_pos) // 2
+      mid_entry = self._apk_libs[mid_pos]
+      mid_offset = mid_entry[0]
+      mid_size = mid_entry[2]
+      if lib_offset < mid_offset:
+        max_pos = mid_pos
+      elif lib_offset >= mid_offset + mid_size:
+        min_pos = mid_pos + 1
+      else:
+        # Found it
+        new_path = '%s!lib/%s' % (lib_path, mid_entry[3])
+        new_offset = lib_offset - mid_offset
+        return (new_path, new_offset)
+
+    return lib_path, lib_offset
+
+
+class HostLibraryFinderTest(unittest.TestCase):
+
+  def testEmpty(self):
+    finder = symbol_utils.HostLibraryFinder()
+    self.assertIsNone(finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))
+    self.assertIsNone(
+        finder.Find('/data/data/com.example.app-1/base.apk!lib/libfoo.so'))
+
+
+  def testSimpleDirectory(self):
+    finder = symbol_utils.HostLibraryFinder()
+    with _TempDir() as tmp_dir:
+      host_libfoo_path = os.path.join(tmp_dir, 'libfoo.so')
+      host_libbar_path = os.path.join(tmp_dir, 'libbar.so')
+      _TouchFile(host_libfoo_path)
+      _TouchFile(host_libbar_path)
+
+      finder.AddSearchDir(tmp_dir)
+
+      # Regular library path (extracted at installation by the PackageManager).
+      # Note that the extraction path has changed between Android releases,
+      # i.e. it can be /data/app/, /data/data/ or /data/app-lib/ depending
+      # on the system.
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/data/app-lib/com.example.app-1/lib/libfoo.so'))
+
+      # Verify that the path doesn't really matter
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/whatever/what.apk!lib/libfoo.so'))
+
+      self.assertEqual(
+          host_libbar_path,
+          finder.Find('/data/data/com.example.app-1/lib/libbar.so'))
+
+      self.assertIsNone(
+          finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))
+
+
+  def testMultipleDirectories(self):
+    with _TempDir() as tmp_dir:
+      # Create the following files:
+      #   <tmp_dir>/aaa/
+      #      libfoo.so
+      #   <tmp_dir>/bbb/
+      #      libbar.so
+      #      libfoo.so    (this one should never be seen because 'aaa'
+      #                    comes first in the search path list).
+      #
+      aaa_dir = os.path.join(tmp_dir, 'aaa')
+      bbb_dir = os.path.join(tmp_dir, 'bbb')
+      os.makedirs(aaa_dir)
+      os.makedirs(bbb_dir)
+
+      host_libfoo_path = os.path.join(aaa_dir, 'libfoo.so')
+      host_libbar_path = os.path.join(bbb_dir, 'libbar.so')
+      host_libfoo2_path = os.path.join(bbb_dir, 'libfoo.so')
+
+      _TouchFile(host_libfoo_path)
+      _TouchFile(host_libbar_path)
+      _TouchFile(host_libfoo2_path)
+
+      finder = symbol_utils.HostLibraryFinder()
+      finder.AddSearchDir(aaa_dir)
+      finder.AddSearchDir(bbb_dir)
+
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))
+
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/data/whatever/base.apk!lib/libfoo.so'))
+
+      self.assertEqual(
+          host_libbar_path,
+          finder.Find('/data/data/com.example.app-1/lib/libbar.so'))
+
+      self.assertIsNone(
+          finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))
+
+
+class ElfSymbolResolverTest(unittest.TestCase):
+
+  def testCreation(self):
+    resolver = symbol_utils.ElfSymbolResolver(
+        addr2line_path_for_tests=_MOCK_A2L_PATH)
+    self.assertTrue(resolver)
+
+  def testWithSimpleOffsets(self):
+    resolver = symbol_utils.ElfSymbolResolver(
+        addr2line_path_for_tests=_MOCK_A2L_PATH)
+    resolver.SetAndroidAbi('ignored-abi')
+
+    for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems():
+      self.assertEqual(resolver.FindSymbolInfo('/some/path/libmock1.so', addr),
+                       expected_sym)
+
+  def testWithPreResolvedSymbols(self):
+    resolver = symbol_utils.ElfSymbolResolver(
+        addr2line_path_for_tests=_MOCK_A2L_PATH)
+    resolver.SetAndroidAbi('ignored-abi')
+    resolver.AddLibraryOffsets('/some/path/libmock1.so',
+                               _TEST_SYMBOL_DATA.keys())
+
+    resolver.DisallowSymbolizerForTesting()
+
+    for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems():
+      sym_info = resolver.FindSymbolInfo('/some/path/libmock1.so', addr)
+      self.assertIsNotNone(sym_info, 'None symbol info for addr %x' % addr)
+      self.assertEqual(
+          sym_info, expected_sym,
+          'Invalid symbol info for addr %x [%s] expected [%s]' % (
+              addr, sym_info, expected_sym))
+
+
+class MemoryMapTest(unittest.TestCase):
+
+  def testCreation(self):
+    mem_map = symbol_utils.MemoryMap('test-abi32')
+    self.assertIsNone(mem_map.FindSectionForAddress(0))
+
+  def testParseLines(self):
+    mem_map = symbol_utils.MemoryMap('test-abi32')
+    mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
+    for exp_addr, exp_size, exp_path, exp_offset in _TEST_MEMORY_MAP_SECTIONS:
+      text = '(addr:%x, size:%x, path:%s, offset=%x)' % (
+          exp_addr, exp_size, exp_path, exp_offset)
+
+      t = mem_map.FindSectionForAddress(exp_addr)
+      self.assertTrue(t, 'Could not find %s' % text)
+      self.assertEqual(t.address, exp_addr)
+      self.assertEqual(t.size, exp_size)
+      self.assertEqual(t.offset, exp_offset)
+      self.assertEqual(t.path, exp_path)
+
+  def testTranslateLine(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    mem_map = symbol_utils.MemoryMap(android_abi)
+    for line, expected_line in zip(_TEST_MEMORY_MAP.splitlines(),
+                                   _EXPECTED_TEST_MEMORY_MAP.splitlines()):
+      self.assertEqual(mem_map.TranslateLine(line, apk_translator),
+                       expected_line)
+
+
+class StackTranslatorTest(unittest.TestCase):
+
+  def testSimpleStack(self):
+    android_abi = 'test-abi32'
+    mem_map = symbol_utils.MemoryMap(android_abi)
+    mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    stack_translator = symbol_utils.StackTranslator(android_abi, mem_map,
+                                                    apk_translator)
+    input_stack = _TEST_STACK.splitlines()
+    expected_stack = _EXPECTED_STACK.splitlines()
+    self.assertEqual(len(input_stack), len(expected_stack))
+    for stack_line, expected_line in zip(input_stack, expected_stack):
+      new_line = stack_translator.TranslateLine(stack_line)
+      self.assertEqual(new_line, expected_line)
+
+
+class MockSymbolResolver(symbol_utils.SymbolResolver):
+
+  # A regex matching a symbol definition as it appears in a test symbol file.
+  # Format is: <hex-offset> <whitespace> <symbol-string>
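+  # An example line (consistent with _TEST_SYMBOL_DATA):
+  #   1000 mock_sym_for_addr_4096 [mock_src/libmock1.so.c:4096]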
+  _RE_SYMBOL_DEFINITION = re.compile(
+      r'(?P<offset>[0-9a-f]+)\s+(?P<symbol>.*)')
+
+  def __init__(self):
+    super(MockSymbolResolver, self).__init__()
+    self._map = collections.defaultdict(dict)
+
+  def AddTestLibrarySymbols(self, lib_name, offsets_map):
+    """Add a new test entry for a given library name.
+
+    Args:
+      lib_name: Library name (e.g. 'libfoo.so')
+      offsets_map: A mapping from offsets to symbol info strings.
+    """
+    self._map[lib_name] = offsets_map
+
+  def ReadTestFile(self, file_path, lib_name):
+    """Read a single test symbol file, matching a given library.
+
+    Args:
+      file_path: Input file path.
+      lib_name: Library name these symbols correspond to (e.g. 'libfoo.so')
+    """
+    with open(file_path) as f:
+      for line in f.readlines():
+        line = line.rstrip()
+        m = MockSymbolResolver._RE_SYMBOL_DEFINITION.match(line)
+        if m:
+          offset = int(m.group('offset'), 16)
+          symbol = m.group('symbol')
+          self._map[lib_name][offset] = symbol
+
+  def ReadTestFilesInDir(self, dir_path, file_suffix):
+    """Read all symbol test files in a given directory.
+
+    Args:
+      dir_path: Directory path.
+      file_suffix: File suffix used to detect test symbol files.
+    """
+    for filename in os.listdir(dir_path):
+      if filename.endswith(file_suffix):
+        lib_name = filename[:-len(file_suffix)]
+        self.ReadTestFile(os.path.join(dir_path, filename), lib_name)
+
+  def FindSymbolInfo(self, device_path, device_offset):
+    """Implement SymbolResolver.FindSymbolInfo."""
+    lib_name = os.path.basename(device_path)
+    offsets = self._map.get(lib_name)
+    if not offsets:
+      return None
+
+    return offsets.get(device_offset)
+
+
+class BacktraceTranslatorTest(unittest.TestCase):
+
+  def testEmpty(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator()
+    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+                                                            apk_translator)
+    self.assertTrue(backtrace_translator)
+
+  def testFindLibraryOffsets(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+                                                            apk_translator)
+    input_backtrace = _EXPECTED_BACKTRACE.splitlines()
+    expected_lib_offsets_map = _EXPECTED_BACKTRACE_OFFSETS_MAP
+    offset_map = backtrace_translator.FindLibraryOffsets(input_backtrace)
+    for lib_path, offsets in offset_map.iteritems():
+      self.assertTrue(lib_path in expected_lib_offsets_map,
+                      '%s is not in expected library-offsets map!' % lib_path)
+      sorted_offsets = sorted(offsets)
+      sorted_expected_offsets = sorted(expected_lib_offsets_map[lib_path])
+      self.assertEqual(sorted_offsets, sorted_expected_offsets,
+                       '%s has invalid offsets %s expected %s' % (
+                          lib_path, sorted_offsets, sorted_expected_offsets))
+
+  def testTranslateLine(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+                                                            apk_translator)
+    input_backtrace = _TEST_BACKTRACE.splitlines()
+    expected_backtrace = _EXPECTED_BACKTRACE.splitlines()
+    self.assertEqual(len(input_backtrace), len(expected_backtrace))
+    for trace_line, expected_line in zip(input_backtrace, expected_backtrace):
+      line = backtrace_translator.TranslateLine(trace_line,
+                                                MockSymbolResolver())
+      self.assertEqual(line, expected_line)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/__init__.py b/src/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/utils/__init__.py
diff --git a/src/build/android/pylib/utils/app_bundle_utils.py b/src/build/android/pylib/utils/app_bundle_utils.py
new file mode 100644
index 0000000..b2e9927
--- /dev/null
+++ b/src/build/android/pylib/utils/app_bundle_utils.py
@@ -0,0 +1,165 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import re
+import sys
+import tempfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+
+from util import build_utils
+from util import md5_check
+from util import resource_utils
+import bundletool
+
+# List of valid modes for GenerateBundleApks()
+BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed')
+OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE',
+                        'TEXTURE_COMPRESSION_FORMAT')
+_SYSTEM_MODES = ('system_compressed', 'system')
+
+_ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64']
+
+
+def _CreateDeviceSpec(bundle_path, sdk_version, locales):
+  if not sdk_version:
+    manifest_data = bundletool.RunBundleTool(
+        ['dump', 'manifest', '--bundle', bundle_path])
+    sdk_version = int(
+        re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1))
+
+  # Setting sdkVersion=minSdkVersion prevents multiple per-minSdkVersion .apk
+  # files from being created within the .apks file.
+  return {
+      'screenDensity': 1000,  # Ignored since we don't split on density.
+      'sdkVersion': sdk_version,
+      'supportedAbis': _ALL_ABIS,  # Our .aab files are already split on abi.
+      'supportedLocales': locales,
+  }
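+
+# For illustration: with sdk_version=21 and locales=['hi'], the function above
+# returns
+#   {'screenDensity': 1000, 'sdkVersion': 21,
+#    'supportedAbis': ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'],
+#    'supportedLocales': ['hi']}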
+
+
+def GenerateBundleApks(bundle_path,
+                       bundle_apks_path,
+                       aapt2_path,
+                       keystore_path,
+                       keystore_password,
+                       keystore_alias,
+                       mode=None,
+                       minimal=False,
+                       minimal_sdk_version=None,
+                       check_for_noop=True,
+                       system_image_locales=None,
+                       optimize_for=None):
+  """Generate an .apks archive from a an app bundle if needed.
+
+  Args:
+    bundle_path: Input bundle file path.
+    bundle_apks_path: Output bundle .apks archive path. Name must end with
+      '.apks' or this operation will fail.
+    aapt2_path: Path to aapt2 build tool.
+    keystore_path: Path to keystore.
+    keystore_password: Keystore password, as a string.
+    keystore_alias: Keystore signing key alias.
+    mode: Build mode, which must be either None or one of BUILD_APKS_MODES.
+    minimal: Create the minimal set of apks possible (English-only).
+    minimal_sdk_version: Use this sdkVersion when |minimal| or
+      |system_image_locales| args are present.
+    check_for_noop: Use md5_check to short-circuit when inputs have not changed.
+    system_image_locales: Locales to package in the APK when mode is "system"
+      or "system_compressed".
+    optimize_for: Overrides split configuration, which must be None or
+      one of OPTIMIZE_FOR_OPTIONS.
+  """
+  device_spec = None
+  if minimal_sdk_version:
+    assert minimal or system_image_locales, (
+        'minimal_sdk_version is only used when minimal or system_image_locales '
+        'is specified')
+  if minimal:
+    # Measure with one language split installed. Use Hindi because it is
+    # popular. resource_size.py looks for splits/base-hi.apk.
+    # Note: English is always included since it's in base-master.apk.
+    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, ['hi'])
+  elif mode in _SYSTEM_MODES:
+    if not system_image_locales:
+      raise Exception('system modes require system_image_locales')
+    # Bundletool doesn't seem to understand device specs with locales in the
+    # form of "<lang>-r<region>", so just provide the language code instead.
+    locales = [
+        resource_utils.ToAndroidLocaleName(l).split('-')[0]
+        for l in system_image_locales
+    ]
+    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, locales)
+
+  def rebuild():
+    logging.info('Building %s', bundle_apks_path)
+    with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_apks_file:
+      cmd_args = [
+          'build-apks',
+          '--aapt2=%s' % aapt2_path,
+          '--output=%s' % tmp_apks_file.name,
+          '--bundle=%s' % bundle_path,
+          '--ks=%s' % keystore_path,
+          '--ks-pass=pass:%s' % keystore_password,
+          '--ks-key-alias=%s' % keystore_alias,
+          '--overwrite',
+      ]
+
+      if mode is not None:
+        if mode not in BUILD_APKS_MODES:
+          raise Exception('Invalid mode parameter %s (should be in %s)' %
+                          (mode, BUILD_APKS_MODES))
+        cmd_args += ['--mode=' + mode]
+
+      if optimize_for:
+        if optimize_for not in OPTIMIZE_FOR_OPTIONS:
+          raise Exception('Invalid optimize_for parameter %s '
+                          '(should be in %s)' %
+                          (optimize_for, OPTIMIZE_FOR_OPTIONS))
+        cmd_args += ['--optimize-for=' + optimize_for]
+
+      with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as spec_file:
+        if device_spec:
+          json.dump(device_spec, spec_file)
+          spec_file.flush()
+          cmd_args += ['--device-spec=' + spec_file.name]
+        bundletool.RunBundleTool(cmd_args)
+
+      # Make the resulting .apks file hermetic.
+      with build_utils.TempDir() as temp_dir, \
+        build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
+        files = build_utils.ExtractAll(tmp_apks_file.name, temp_dir)
+        build_utils.DoZip(files, f, base_dir=temp_dir)
+
+  if check_for_noop:
+    # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than
+    # input_paths, to speed up MD5 computations by about 400ms (the .jar file
+    # contains thousands of class files which are checked independently,
+    # resulting in an .md5.stamp of more than 60000 lines!).
+    input_paths = [bundle_path, aapt2_path, keystore_path]
+    input_strings = [
+        keystore_password,
+        keystore_alias,
+        bundletool.BUNDLETOOL_JAR_PATH,
+        # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but
+        # it's safer not to assume that will remain true in the future.
+        bundletool.BUNDLETOOL_VERSION,
+        device_spec,
+    ]
+    if mode is not None:
+      input_strings.append(mode)
+
+    # Avoid rebuilding (saves ~20s) when the input files have not changed. This
+    # is essential when calling the apk_operations.py script multiple times with
+    # the same bundle (e.g. out/Debug/bin/monochrome_public_bundle run).
+    md5_check.CallAndRecordIfStale(
+        rebuild,
+        input_paths=input_paths,
+        input_strings=input_strings,
+        output_paths=[bundle_apks_path])
+  else:
+    rebuild()
diff --git a/src/build/android/pylib/utils/argparse_utils.py b/src/build/android/pylib/utils/argparse_utils.py
new file mode 100644
index 0000000..06544a2
--- /dev/null
+++ b/src/build/android/pylib/utils/argparse_utils.py
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+
+
+class CustomHelpAction(argparse.Action):
+  '''Allows defining custom help actions.
+
+  Help actions can run even when the parser would otherwise fail on missing
+  arguments. The first help or custom help command mentioned on the command
+  line will have its help text displayed.
+
+  Usage:
+      parser = argparse.ArgumentParser(...)
+      CustomHelpAction.EnableFor(parser)
+      parser.add_argument('--foo-help',
+                          action='custom_help',
+                          custom_help_text='this is the help message',
+                          help='What this helps with')
+  '''
+  # Derived from argparse._HelpAction from
+  # https://github.com/python/cpython/blob/master/Lib/argparse.py
+
+  # pylint: disable=redefined-builtin
+  # (complains about 'help' being redefined)
+  def __init__(self,
+               option_strings,
+               dest=argparse.SUPPRESS,
+               default=argparse.SUPPRESS,
+               custom_help_text=None,
+               help=None):
+    super(CustomHelpAction, self).__init__(option_strings=option_strings,
+                                           dest=dest,
+                                           default=default,
+                                           nargs=0,
+                                           help=help)
+
+    if not custom_help_text:
+      raise ValueError('custom_help_text is required')
+    self._help_text = custom_help_text
+
+  def __call__(self, parser, namespace, values, option_string=None):
+    print(self._help_text)
+    parser.exit()
+
+  @staticmethod
+  def EnableFor(parser):
+    parser.register('action', 'custom_help', CustomHelpAction)
diff --git a/src/build/android/pylib/utils/chrome_proxy_utils.py b/src/build/android/pylib/utils/chrome_proxy_utils.py
new file mode 100644
index 0000000..149d0b9
--- /dev/null
+++ b/src/build/android/pylib/utils/chrome_proxy_utils.py
@@ -0,0 +1,171 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilities for setting up and tear down WPR and TsProxy service."""
+
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+from devil.android import forwarder
+
+PROXY_HOST_IP = '127.0.0.1'
+# From Catapult/WebPageReplay document.
+IGNORE_CERT_ERROR_SPKI_LIST = 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I='
+PROXY_SERVER = 'socks5://localhost'
+DEFAULT_DEVICE_PORT = 1080
+DEFAULT_ROUND_TRIP_LATENCY_MS = 100
+DEFAULT_DOWNLOAD_BANDWIDTH_KBPS = 72000
+DEFAULT_UPLOAD_BANDWIDTH_KBPS = 72000
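+
+# A typical ChromeProxySession workflow, as a sketch (assumes |device| is a
+# connected devil DeviceUtils instance and |archive_path| points to an
+# existing WPR archive):
+#
+#   session = ChromeProxySession()
+#   session.wpr_record_mode = False  # Replay mode.
+#   session.Start(device, archive_path)
+#   flags = session.GetFlags()  # Extra command-line flags for Chrome.
+#   ...
+#   session.Stop(device)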
+
+
+class WPRServer(object):
+  """Utils to set up a webpagereplay_go_server instance."""
+
+  def __init__(self):
+    self._archive_path = None
+    self._host_http_port = 0
+    self._host_https_port = 0
+    self._record_mode = False
+    self._server = None
+
+  def StartServer(self, wpr_archive_path):
+    """Starts a webpagereplay_go_server instance."""
+    if wpr_archive_path == self._archive_path and self._server:
+      # Reuse existing webpagereplay_go_server instance.
+      return
+
+    if self._server:
+      self.StopServer()
+
+    replay_options = []
+    if self._record_mode:
+      replay_options.append('--record')
+
+    ports = {}
+    if not self._server:
+      self._server = webpagereplay_go_server.ReplayServer(
+          wpr_archive_path,
+          PROXY_HOST_IP,
+          http_port=self._host_http_port,
+          https_port=self._host_https_port,
+          replay_options=replay_options)
+      self._archive_path = wpr_archive_path
+      ports = self._server.StartServer()
+
+    self._host_http_port = ports['http']
+    self._host_https_port = ports['https']
+
+  def StopServer(self):
+    """Stops the webpagereplay_go_server instance and resets archive."""
+    self._server.StopServer()
+    self._server = None
+    self._host_http_port = 0
+    self._host_https_port = 0
+
+  @staticmethod
+  def SetServerBinaryPath(go_binary_path):
+    """Sets the go_binary_path for webpagereplay_go_server.ReplayServer."""
+    webpagereplay_go_server.ReplayServer.SetGoBinaryPath(go_binary_path)
+
+  @property
+  def record_mode(self):
+    return self._record_mode
+
+  @record_mode.setter
+  def record_mode(self, value):
+    self._record_mode = value
+
+  @property
+  def http_port(self):
+    return self._host_http_port
+
+  @property
+  def https_port(self):
+    return self._host_https_port
+
+  @property
+  def archive_path(self):
+    return self._archive_path
+
+
+class ChromeProxySession(object):
+  """Utils to help set up a Chrome Proxy."""
+
+  def __init__(self, device_proxy_port=DEFAULT_DEVICE_PORT):
+    self._device_proxy_port = device_proxy_port
+    self._ts_proxy_server = ts_proxy_server.TsProxyServer(PROXY_HOST_IP)
+    self._wpr_server = WPRServer()
+
+  @property
+  def wpr_record_mode(self):
+    """Returns whether this proxy session was running in record mode."""
+    return self._wpr_server.record_mode
+
+  @wpr_record_mode.setter
+  def wpr_record_mode(self, value):
+    self._wpr_server.record_mode = value
+
+  @property
+  def wpr_replay_mode(self):
+    """Returns whether this proxy session was running in replay mode."""
+    return not self._wpr_server.record_mode
+
+  @property
+  def wpr_archive_path(self):
+    """Returns the wpr archive file path used in this proxy session."""
+    return self._wpr_server.archive_path
+
+  @property
+  def device_proxy_port(self):
+    return self._device_proxy_port
+
+  def GetFlags(self):
+    """Gets the chrome command line flags to be needed by ChromeProxySession."""
+    extra_flags = []
+
+    extra_flags.append('--ignore-certificate-errors-spki-list=%s' %
+                       IGNORE_CERT_ERROR_SPKI_LIST)
+    extra_flags.append('--proxy-server=%s:%s' %
+                       (PROXY_SERVER, self._device_proxy_port))
+    return extra_flags
+
+  @staticmethod
+  def SetWPRServerBinary(go_binary_path):
+    """Sets the WPR server go_binary_path."""
+    WPRServer.SetServerBinaryPath(go_binary_path)
+
+  def Start(self, device, wpr_archive_path):
+    """Starts the wpr_server as well as the ts_proxy server and setups env.
+
+    Args:
+      device: A DeviceUtils instance.
+      wpr_archive_path: A abs path to the wpr archive file.
+
+    """
+    self._wpr_server.StartServer(wpr_archive_path)
+    self._ts_proxy_server.StartServer()
+
+    # Maps device port to host port
+    forwarder.Forwarder.Map(
+        [(self._device_proxy_port, self._ts_proxy_server.port)], device)
+    # Maps tsProxy port to wpr http/https ports
+    self._ts_proxy_server.UpdateOutboundPorts(
+        http_port=self._wpr_server.http_port,
+        https_port=self._wpr_server.https_port)
+    self._ts_proxy_server.UpdateTrafficSettings(
+        round_trip_latency_ms=DEFAULT_ROUND_TRIP_LATENCY_MS,
+        download_bandwidth_kbps=DEFAULT_DOWNLOAD_BANDWIDTH_KBPS,
+        upload_bandwidth_kbps=DEFAULT_UPLOAD_BANDWIDTH_KBPS)
+
+  def Stop(self, device):
+    """Stops the wpr_server, and ts_proxy server and tears down env.
+
+    Note that Stop does not reset wpr_record_mode, wpr_replay_mode,
+    wpr_archive_path property.
+
+    Args:
+      device: A DeviceUtils instance.
+    """
+    self._wpr_server.StopServer()
+    self._ts_proxy_server.StopServer()
+    forwarder.Forwarder.UnmapDevicePort(self._device_proxy_port, device)
diff --git a/src/build/android/pylib/utils/chrome_proxy_utils_test.py b/src/build/android/pylib/utils/chrome_proxy_utils_test.py
new file mode 100755
index 0000000..b38b268
--- /dev/null
+++ b/src/build/android/pylib/utils/chrome_proxy_utils_test.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for chrome_proxy_utils."""
+
+#pylint: disable=protected-access
+
+import os
+import unittest
+
+from pylib.utils import chrome_proxy_utils
+
+from devil.android import forwarder
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+import mock  # pylint: disable=import-error
+
+
+def _DeviceUtilsMock(test_serial, is_ready=True):
+  """Returns a DeviceUtils instance based on given serial."""
+  adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+  adb.__str__ = mock.Mock(return_value=test_serial)
+  adb.GetDeviceSerial.return_value = test_serial
+  adb.is_ready = is_ready
+  return device_utils.DeviceUtils(adb)
+
+
+class ChromeProxySessionTest(unittest.TestCase):
+  """Unittest for ChromeProxySession."""
+
+  #pylint: disable=no-self-use
+
+  @mock.patch.object(forwarder.Forwarder, 'Map')
+  @mock.patch.object(chrome_proxy_utils.WPRServer, 'StartServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StartServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateOutboundPorts')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateTrafficSettings')
+  @mock.patch('py_utils.ts_proxy_server.TsProxyServer.port',
+              new_callable=mock.PropertyMock)
+  def test_Start(self, port_mock, traffic_setting_mock, outboundport_mock,
+                 start_server_mock, wpr_mock, forwarder_mock):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy._wpr_server._host_http_port = 1
+    chrome_proxy._wpr_server._host_https_port = 2
+    port_mock.return_value = 3
+    device = _DeviceUtilsMock('01234')
+    chrome_proxy.Start(device, 'abc')
+
+    forwarder_mock.assert_called_once_with([(4, 3)], device)
+    wpr_mock.assert_called_once_with('abc')
+    start_server_mock.assert_called_once()
+    outboundport_mock.assert_called_once_with(http_port=1, https_port=2)
+    traffic_setting_mock.assert_called_once_with(download_bandwidth_kbps=72000,
+                                                 round_trip_latency_ms=100,
+                                                 upload_bandwidth_kbps=72000)
+    port_mock.assert_called_once()
+
+  @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+  @mock.patch.object(chrome_proxy_utils.WPRServer, 'StopServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+  def test_Stop(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    device = _DeviceUtilsMock('01234')
+    chrome_proxy.wpr_record_mode = True
+    chrome_proxy._wpr_server._archive_path = 'abc'
+    chrome_proxy.Stop(device)
+
+    forwarder_mock.assert_called_once_with(4, device)
+    wpr_mock.assert_called_once_with()
+    ts_proxy_mock.assert_called_once_with()
+
+  #pylint: enable=no-self-use
+
+  @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+  def test_Stop_WithProperties(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy._wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__), chrome_proxy_utils.PROXY_HOST_IP, 0, 0, [])
+    chrome_proxy._wpr_server._archive_path = os.path.abspath(__file__)
+    device = _DeviceUtilsMock('01234')
+    chrome_proxy.wpr_record_mode = True
+    chrome_proxy.Stop(device)
+
+    forwarder_mock.assert_called_once_with(4, device)
+    wpr_mock.assert_called_once_with()
+    ts_proxy_mock.assert_called_once_with()
+    self.assertFalse(chrome_proxy.wpr_replay_mode)
+    self.assertEquals(chrome_proxy.wpr_archive_path, os.path.abspath(__file__))
+
+  def test_SetWPRRecordMode(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy.wpr_record_mode = True
+    self.assertTrue(chrome_proxy._wpr_server.record_mode)
+    self.assertTrue(chrome_proxy.wpr_record_mode)
+    self.assertFalse(chrome_proxy.wpr_replay_mode)
+
+    chrome_proxy.wpr_record_mode = False
+    self.assertFalse(chrome_proxy._wpr_server.record_mode)
+    self.assertFalse(chrome_proxy.wpr_record_mode)
+    self.assertTrue(chrome_proxy.wpr_replay_mode)
+
+  def test_SetWPRArchivePath(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy._wpr_server._archive_path = 'abc'
+    self.assertEquals(chrome_proxy.wpr_archive_path, 'abc')
+
+  def test_UseDefaultDeviceProxyPort(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession()
+    expected_flags = [
+        '--ignore-certificate-errors-spki-list='
+        'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+        '--proxy-server=socks5://localhost:1080'
+    ]
+    self.assertEquals(chrome_proxy.device_proxy_port, 1080)
+    self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+  def test_UseNewDeviceProxyPort(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(1)
+    expected_flags = [
+        '--ignore-certificate-errors-spki-list='
+        'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+        '--proxy-server=socks5://localhost:1'
+    ]
+    self.assertEquals(chrome_proxy.device_proxy_port, 1)
+    self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+
+class WPRServerTest(unittest.TestCase):
+  @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_replaymode(self, wpr_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_archive_file = os.path.abspath(__file__)
+    wpr_server.StartServer(wpr_archive_file)
+
+    wpr_mock.assert_called_once_with(wpr_archive_file,
+                                     '127.0.0.1',
+                                     http_port=0,
+                                     https_port=0,
+                                     replay_options=[])
+
+    self.assertEqual(wpr_server._archive_path, wpr_archive_file)
+    self.assertTrue(wpr_server._server)
+
+  @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_recordmode(self, wpr_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server.record_mode = True
+    wpr_archive_file = os.path.abspath(__file__)
+    wpr_server.StartServer(wpr_archive_file)
+
+    wpr_mock.assert_called_once_with(wpr_archive_file,
+                                     '127.0.0.1',
+                                     http_port=0,
+                                     https_port=0,
+                                     replay_options=['--record'])
+
+    self.assertEqual(wpr_server._archive_path, wpr_archive_file)
+    self.assertTrue(wpr_server._server)
+
+  #pylint: disable=no-self-use
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_recordmode(self, start_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    start_server_mock.return_value = {'http': 1, 'https': 2}
+    wpr_server.StartServer(os.path.abspath(__file__))
+
+    start_server_mock.assert_called_once()
+    self.assertEqual(wpr_server._host_http_port, 1)
+    self.assertEqual(wpr_server._host_https_port, 2)
+    self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
+    self.assertTrue(wpr_server._server)
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_reuseServer(self, start_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__),
+        chrome_proxy_utils.PROXY_HOST_IP,
+        http_port=0,
+        https_port=0,
+        replay_options=[])
+    wpr_server._archive_path = os.path.abspath(__file__)
+    wpr_server.StartServer(os.path.abspath(__file__))
+    start_server_mock.assert_not_called()
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  def test_StartServer_notReuseServer(self, stop_server_mock, start_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__),
+        chrome_proxy_utils.PROXY_HOST_IP,
+        http_port=0,
+        https_port=0,
+        replay_options=[])
+    wpr_server._archive_path = ''
+    wpr_server.StartServer(os.path.abspath(__file__))
+    start_server_mock.assert_called_once()
+    stop_server_mock.assert_called_once()
+
+  #pylint: enable=no-self-use
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  def test_StopServer(self, stop_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__),
+        chrome_proxy_utils.PROXY_HOST_IP,
+        http_port=0,
+        https_port=0,
+        replay_options=[])
+    wpr_server.StopServer()
+    stop_server_mock.assert_called_once()
+    self.assertFalse(wpr_server._server)
+    self.assertFalse(wpr_server._archive_path)
+    self.assertFalse(wpr_server.http_port)
+    self.assertFalse(wpr_server.https_port)
+
+  def test_SetWPRRecordMode(self):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server.record_mode = True
+    self.assertTrue(wpr_server.record_mode)
+    wpr_server.record_mode = False
+    self.assertFalse(wpr_server.record_mode)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/utils/decorators.py b/src/build/android/pylib/utils/decorators.py
new file mode 100644
index 0000000..8eec1d1
--- /dev/null
+++ b/src/build/android/pylib/utils/decorators.py
@@ -0,0 +1,37 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+
+
+def Memoize(f):
+  """Decorator to cache return values of function."""
+  memoize_dict = {}
+  @functools.wraps(f)
+  def wrapper(*args, **kwargs):
+    key = repr((args, kwargs))
+    if key not in memoize_dict:
+      memoize_dict[key] = f(*args, **kwargs)
+    return memoize_dict[key]
+  return wrapper
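+
+# Example usage (illustrative; |_ComputeSlowly| is a hypothetical helper):
+#
+#   @Memoize
+#   def GetExpensiveValue(key):
+#     return _ComputeSlowly(key)  # Runs once per distinct |key|.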
+
+
+def NoRaiseException(default_return_value=None, exception_message=''):
+  """Returns decorator that catches and logs uncaught Exceptions.
+
+  Args:
+    default_return_value: Value to return in the case of uncaught Exception.
+    exception_message: Message for uncaught exceptions.
+  """
+  def decorator(f):
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+      try:
+        return f(*args, **kwargs)
+      except Exception:  # pylint: disable=broad-except
+        logging.exception(exception_message)
+        return default_return_value
+    return wrapper
+  return decorator
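+
+
+# Example usage (illustrative): a flaky operation that logs failures and
+# falls back to a default value instead of raising. |_Download| is a
+# hypothetical helper.
+#
+#   @NoRaiseException(default_return_value='', exception_message='fetch failed')
+#   def FetchOptionalData(url):
+#     return _Download(url)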
diff --git a/src/build/android/pylib/utils/decorators_test.py b/src/build/android/pylib/utils/decorators_test.py
new file mode 100755
index 0000000..73a9f0d
--- /dev/null
+++ b/src/build/android/pylib/utils/decorators_test.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for decorators.py."""
+
+import unittest
+
+from pylib.utils import decorators
+
+
+class NoRaiseExceptionDecoratorTest(unittest.TestCase):
+
+  def testFunctionDoesNotRaiseException(self):
+    """Tests that the |NoRaiseException| decorator catches exception."""
+
+    @decorators.NoRaiseException()
+    def raiseException():
+      raise Exception()
+
+    try:
+      raiseException()
+    except Exception:  # pylint: disable=broad-except
+      self.fail('Exception was not caught by |NoRaiseException| decorator')
+
+  def testFunctionReturnsCorrectValues(self):
+    """Tests that the |NoRaiseException| decorator returns correct values."""
+
+    @decorators.NoRaiseException(default_return_value=111)
+    def raiseException():
+      raise Exception()
+
+    @decorators.NoRaiseException(default_return_value=111)
+    def doesNotRaiseException():
+      return 999
+
+    self.assertEquals(raiseException(), 111)
+    self.assertEquals(doesNotRaiseException(), 999)
+
+
+class MemoizeDecoratorTest(unittest.TestCase):
+
+  def testFunctionExceptionNotMemoized(self):
+    """Tests that |Memoize| decorator does not cache exception results."""
+
+    class ExceptionType1(Exception):
+      pass
+
+    class ExceptionType2(Exception):
+      pass
+
+    @decorators.Memoize
+    def raiseExceptions():
+      if raiseExceptions.count == 0:
+        raiseExceptions.count += 1
+        raise ExceptionType1()
+
+      if raiseExceptions.count == 1:
+        raise ExceptionType2()
+    raiseExceptions.count = 0
+
+    with self.assertRaises(ExceptionType1):
+      raiseExceptions()
+    with self.assertRaises(ExceptionType2):
+      raiseExceptions()
+
+  def testFunctionResultMemoized(self):
+    """Tests that |Memoize| decorator caches results."""
+
+    @decorators.Memoize
+    def memoized():
+      memoized.count += 1
+      return memoized.count
+    memoized.count = 0
+
+    def notMemoized():
+      notMemoized.count += 1
+      return notMemoized.count
+    notMemoized.count = 0
+
+    self.assertEquals(memoized(), 1)
+    self.assertEquals(memoized(), 1)
+    self.assertEquals(memoized(), 1)
+
+    self.assertEquals(notMemoized(), 1)
+    self.assertEquals(notMemoized(), 2)
+    self.assertEquals(notMemoized(), 3)
+
+  def testFunctionMemoizedBasedOnArgs(self):
+    """Tests that |Memoize| caches results based on args and kwargs."""
+
+    @decorators.Memoize
+    def returnValueBasedOnArgsKwargs(a, k=0):
+      return a + k
+
+    self.assertEquals(returnValueBasedOnArgsKwargs(1, 1), 2)
+    self.assertEquals(returnValueBasedOnArgsKwargs(1, 2), 3)
+    self.assertEquals(returnValueBasedOnArgsKwargs(2, 1), 3)
+    self.assertEquals(returnValueBasedOnArgsKwargs(3, 3), 6)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/utils/device_dependencies.py b/src/build/android/pylib/utils/device_dependencies.py
new file mode 100644
index 0000000..9cb5bd8
--- /dev/null
+++ b/src/build/android/pylib/utils/device_dependencies.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+from pylib import constants
+
+
+_EXCLUSIONS = [
+    re.compile(r'.*OWNERS'),  # Should never be included.
+    re.compile(r'.*\.crx'),  # Chrome extension zip files.
+    re.compile(os.path.join('.*',
+                            r'\.git.*')),  # Any '.git*' directories/files.
+    re.compile(r'.*\.so'),  # Libraries packed into .apk.
+    re.compile(r'.*Mojo.*manifest\.json'),  # Some source_set()s pull these in.
+    re.compile(r'.*\.py'),  # Some test_support targets include python deps.
+    re.compile(r'.*\.apk'),  # Should be installed separately.
+    re.compile(r'.*lib.java/.*'),  # Never need java intermediates.
+
+    # Test filter files:
+    re.compile(r'.*/testing/buildbot/filters/.*'),
+
+    # Chrome external extensions config file.
+    re.compile(r'.*external_extensions\.json'),
+
+    # Exists just to test the compile, not to be run.
+    re.compile(r'.*jni_generator_tests'),
+
+    # v8's blobs and icu data get packaged into APKs.
+    re.compile(r'.*snapshot_blob.*\.bin'),
+    re.compile(r'.*icudtl.bin'),
+
+    # Scripts that are needed by swarming, but not on devices:
+    re.compile(r'.*llvm-symbolizer'),
+    re.compile(r'.*md5sum_bin'),
+    re.compile(os.path.join('.*', 'development', 'scripts', 'stack')),
+
+    # Required for java deobfuscation on the host:
+    re.compile(r'.*build/android/stacktrace/.*'),
+    re.compile(r'.*third_party/jdk/.*'),
+    re.compile(r'.*third_party/proguard/.*'),
+
+    # Build artifacts:
+    re.compile(r'.*\.stamp'),
+    re.compile(r'.*\.pak\.info'),
+    re.compile(r'.*\.incremental\.json'),
+]
+
+
+def _FilterDataDeps(abs_host_files):
+  exclusions = _EXCLUSIONS + [
+      re.compile(os.path.join(constants.GetOutDirectory(), 'bin'))
+  ]
+  return [p for p in abs_host_files if not any(r.match(p) for r in exclusions)]
+
+
+def DevicePathComponentsFor(host_path, output_directory):
+  """Returns the device path components for a given host path.
+
+  This returns the device path as a list of joinable path components,
+  with None as the first element to indicate that the path should be
+  rooted at $EXTERNAL_STORAGE.
+
+  e.g., given
+
+    '$RUNTIME_DEPS_ROOT_DIR/foo/bar/baz.txt'
+
+  this would return
+
+    [None, 'foo', 'bar', 'baz.txt']
+
+  This handles a couple of classes of paths differently than it otherwise
+  would:
+    - All .pak files get mapped to top-level paks/
+    - All other dependencies get mapped to the top level directory
+        - If a file is not in the output directory, its path relative to the
+          output directory will start with '..' components; those are removed
+          and the remainder is mapped to the top-level directory
+        - If a file is in the output directory, its path relative to the
+          output directory is mapped to the top-level directory
+
+  e.g. given
+
+    '$RUNTIME_DEPS_ROOT_DIR/out/Release/icu_fake_dir/icudtl.dat'
+
+  this would return
+
+    [None, 'icu_fake_dir', 'icudtl.dat']
+
+  Args:
+    host_path: The absolute path to the host file.
+    output_directory: The absolute path to the build output directory.
+  Returns:
+    A list of device path components.
+  """
+  if (host_path.startswith(output_directory) and
+      os.path.splitext(host_path)[1] == '.pak'):
+    return [None, 'paks', os.path.basename(host_path)]
+
+  rel_host_path = os.path.relpath(host_path, output_directory)
+
+  device_path_components = [None]
+  p = rel_host_path
+  while p:
+    p, d = os.path.split(p)
+    # The relative path from the output directory to a file under the runtime
+    # deps root directory may start with multiple .. strings, so they need to
+    # be skipped.
+    if d and d != os.pardir:
+      device_path_components.insert(1, d)
+  return device_path_components
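+
+# Illustrative sketch of consuming the returned components: the leading None
+# marks the device root (e.g. $EXTERNAL_STORAGE), which callers substitute
+# before joining. |device_root| is a hypothetical value here.
+#
+#   components = DevicePathComponentsFor(host_path, output_directory)
+#   device_path = posixpath.join(device_root, *components[1:])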
+
+
+def GetDataDependencies(runtime_deps_path):
+  """Returns a list of device data dependencies.
+
+  Args:
+    runtime_deps_path: A str path to the .runtime_deps file.
+  Returns:
+    A list of (host_path, device_path) tuples.
+  """
+  if not runtime_deps_path:
+    return []
+
+  with open(runtime_deps_path, 'r') as runtime_deps_file:
+    rel_host_files = [l.strip() for l in runtime_deps_file if l.strip()]
+
+  output_directory = constants.GetOutDirectory()
+  abs_host_files = [
+      os.path.abspath(os.path.join(output_directory, r))
+      for r in rel_host_files]
+  filtered_abs_host_files = _FilterDataDeps(abs_host_files)
+  # TODO(crbug.com/752610): Filter out host executables, and investigate
+  # whether other files could be filtered as well.
+  return [(f, DevicePathComponentsFor(f, output_directory))
+          for f in filtered_abs_host_files]
diff --git a/src/build/android/pylib/utils/device_dependencies_test.py b/src/build/android/pylib/utils/device_dependencies_test.py
new file mode 100755
index 0000000..b2da5a7
--- /dev/null
+++ b/src/build/android/pylib/utils/device_dependencies_test.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+from pylib import constants
+from pylib.utils import device_dependencies
+
+
+class DevicePathComponentsForTest(unittest.TestCase):
+
+  def testCheckedInFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'foo', 'bar', 'baz.txt'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+  def testOutputDirectoryFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+                             'icudtl.dat')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'icudtl.dat'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+  def testOutputDirectorySubdirFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+                             'test_dir', 'icudtl.dat')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'test_dir', 'icudtl.dat'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+  def testOutputDirectoryPakFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+                             'foo.pak')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'paks', 'foo.pak'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/dexdump.py b/src/build/android/pylib/utils/dexdump.py
new file mode 100644
index 0000000..f81ac60
--- /dev/null
+++ b/src/build/android/pylib/utils/dexdump.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import shutil
+import sys
+import tempfile
+from xml.etree import ElementTree
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+from util import build_utils
+
+DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
+
+
+def Dump(apk_path):
+  """Dumps class and method information from a APK into a dict via dexdump.
+
+  Args:
+    apk_path: An absolute path to an APK file to dump.
+  Returns:
+    A dict in the following format:
+      {
+        <package_name>: {
+          'classes': {
+            <class_name>: {
+              'methods': [<method_1>, <method_2>]
+            }
+          }
+        }
+      }
+  """
+  dexfile_dir = tempfile.mkdtemp()
+  try:
+    parsed_dex_files = []
+    for dex_file in build_utils.ExtractAll(apk_path,
+                                           dexfile_dir,
+                                           pattern='*classes*.dex'):
+      output_xml = cmd_helper.GetCmdOutput(
+          [DEXDUMP_PATH, '-l', 'xml', dex_file])
+      # Dexdump doesn't escape its XML output very well; decode it as utf-8 with
+      # invalid sequences replaced, then remove forbidden characters and
+      # re-encode it (as etree expects a byte string as input so it can figure
+      # out the encoding itself from the XML declaration)
+      BAD_XML_CHARS = re.compile(
+          u'[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' +
+          u'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]')
+      if sys.version_info[0] < 3:
+        decoded_xml = output_xml.decode('utf-8', 'replace')
+        clean_xml = BAD_XML_CHARS.sub(u'\ufffd', decoded_xml)
+      else:
+        # Line duplicated to avoid pylint redefined-variable-type error.
+        clean_xml = BAD_XML_CHARS.sub(u'\ufffd', output_xml)
+      parsed_dex_files.append(
+          _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8'))))
+    return parsed_dex_files
+  finally:
+    shutil.rmtree(dexfile_dir)
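+
+# Example usage (illustrative): listing every method found in an APK.
+#
+#   for parsed_dex in Dump('/abs/path/to/App.apk'):
+#     for package, package_info in parsed_dex.items():
+#       for class_name, class_info in package_info['classes'].items():
+#         print('%s.%s: %s' % (package, class_name, class_info['methods']))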
+
+
+def _ParseRootNode(root):
+  """Parses the XML output of dexdump. This output is in the following format.
+
+  This is a subset of the information contained within dexdump output.
+
+  <api>
+    <package name="foo.bar">
+      <class name="Class" extends="foo.bar.SuperClass">
+        <field name="Field">
+        </field>
+        <constructor name="Method">
+          <parameter name="Param" type="int">
+          </parameter>
+        </constructor>
+        <method name="Method">
+          <parameter name="Param" type="int">
+          </parameter>
+        </method>
+      </class>
+    </package>
+  </api>
+  """
+  results = {}
+  for child in root:
+    if child.tag == 'package':
+      package_name = child.attrib['name']
+      parsed_node = _ParsePackageNode(child)
+      if package_name in results:
+        results[package_name]['classes'].update(parsed_node['classes'])
+      else:
+        results[package_name] = parsed_node
+  return results
+
+
+def _ParsePackageNode(package_node):
+  """Parses a <package> node from the dexdump xml output.
+
+  Returns:
+    A dict in the format:
+      {
+        'classes': {
+          <class_1>: {
+            'methods': [<method_1>, <method_2>]
+          },
+          <class_2>: {
+            'methods': [<method_1>, <method_2>]
+          },
+        }
+      }
+  """
+  classes = {}
+  for child in package_node:
+    if child.tag == 'class':
+      classes[child.attrib['name']] = _ParseClassNode(child)
+  return {'classes': classes}
+
+
+def _ParseClassNode(class_node):
+  """Parses a <class> node from the dexdump xml output.
+
+  Returns:
+    A dict in the format:
+      {
+        'methods': [<method_1>, <method_2>],
+        'superclass': <superclass_name>
+      }
+  """
+  methods = []
+  for child in class_node:
+    if child.tag == 'method':
+      methods.append(child.attrib['name'])
+  return {'methods': methods, 'superclass': class_node.attrib['extends']}
diff --git a/src/build/android/pylib/utils/dexdump_test.py b/src/build/android/pylib/utils/dexdump_test.py
new file mode 100755
index 0000000..3197853
--- /dev/null
+++ b/src/build/android/pylib/utils/dexdump_test.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from xml.etree import ElementTree
+
+from pylib.utils import dexdump
+
+# pylint: disable=protected-access
+
+
+class DexdumpXMLParseTest(unittest.TestCase):
+
+  def testParseRootXmlNode(self):
+    example_xml_string = (
+        '<api>'
+        '<package name="com.foo.bar1">'
+        '<class'
+        '  name="Class1"'
+        '  extends="java.lang.Object"'
+        '  abstract="false"'
+        '  static="false"'
+        '  final="true"'
+        '  visibility="public">'
+        '<method'
+        '  name="class1Method1"'
+        '  return="java.lang.String"'
+        '  abstract="false"'
+        '  native="false"'
+        '  synchronized="false"'
+        '  static="false"'
+        '  final="false"'
+        '  visibility="public">'
+        '</method>'
+        '<method'
+        '  name="class1Method2"'
+        '  return="viod"'
+        '  abstract="false"'
+        '  native="false"'
+        '  synchronized="false"'
+        '  static="false"'
+        '  final="false"'
+        '  visibility="public">'
+        '</method>'
+        '</class>'
+        '<class'
+        '  name="Class2"'
+        '  extends="java.lang.Object"'
+        '  abstract="false"'
+        '  static="false"'
+        '  final="true"'
+        '  visibility="public">'
+        '<method'
+        '  name="class2Method1"'
+        '  return="java.lang.String"'
+        '  abstract="false"'
+        '  native="false"'
+        '  synchronized="false"'
+        '  static="false"'
+        '  final="false"'
+        '  visibility="public">'
+        '</method>'
+        '</class>'
+        '</package>'
+        '<package name="com.foo.bar2">'
+        '</package>'
+        '<package name="com.foo.bar3">'
+        '</package>'
+        '</api>')
+
+    actual = dexdump._ParseRootNode(
+        ElementTree.fromstring(example_xml_string))
+
+    expected = {
+      'com.foo.bar1' : {
+        'classes': {
+          'Class1': {
+            'methods': ['class1Method1', 'class1Method2'],
+            'superclass': 'java.lang.Object',
+          },
+          'Class2': {
+            'methods': ['class2Method1'],
+            'superclass': 'java.lang.Object',
+          }
+        },
+      },
+      'com.foo.bar2' : {'classes': {}},
+      'com.foo.bar3' : {'classes': {}},
+    }
+    self.assertEquals(expected, actual)
+
+  def testParsePackageNode(self):
+    example_xml_string = (
+        '<package name="com.foo.bar">'
+        '<class name="Class1" extends="java.lang.Object">'
+        '</class>'
+        '<class name="Class2" extends="java.lang.Object">'
+        '</class>'
+        '</package>')
+
+    actual = dexdump._ParsePackageNode(
+        ElementTree.fromstring(example_xml_string))
+
+    expected = {
+      'classes': {
+        'Class1': {
+          'methods': [],
+          'superclass': 'java.lang.Object',
+        },
+        'Class2': {
+          'methods': [],
+          'superclass': 'java.lang.Object',
+        },
+      },
+    }
+    self.assertEquals(expected, actual)
+
+  def testParseClassNode(self):
+    example_xml_string = (
+        '<class name="Class1" extends="java.lang.Object">'
+        '<method name="method1">'
+        '</method>'
+        '<method name="method2">'
+        '</method>'
+        '</class>')
+
+    actual = dexdump._ParseClassNode(
+        ElementTree.fromstring(example_xml_string))
+
+    expected = {
+      'methods': ['method1', 'method2'],
+      'superclass': 'java.lang.Object',
+    }
+    self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/gold_utils.py b/src/build/android/pylib/utils/gold_utils.py
new file mode 100644
index 0000000..0b79a6d
--- /dev/null
+++ b/src/build/android/pylib/utils/gold_utils.py
@@ -0,0 +1,78 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""//build/android implementations of //testing/skia_gold_common.
+
+Used for interacting with the Skia Gold image diffing service.
+"""
+
+import os
+import shutil
+
+from devil.utils import cmd_helper
+from pylib.base.output_manager import Datatype
+from pylib.constants import host_paths
+from pylib.utils import repo_utils
+
+with host_paths.SysPath(host_paths.BUILD_PATH):
+  from skia_gold_common import skia_gold_session
+  from skia_gold_common import skia_gold_session_manager
+  from skia_gold_common import skia_gold_properties
+
+
+class AndroidSkiaGoldSession(skia_gold_session.SkiaGoldSession):
+  def _StoreDiffLinks(self, image_name, output_manager, output_dir):
+    """See SkiaGoldSession._StoreDiffLinks for general documentation.
+
+    |output_manager| must be a build.android.pylib.base.OutputManager instance.
+    """
+    given_path = closest_path = diff_path = None
+    # The directory should contain "input-<hash>.png", "closest-<hash>.png",
+    # and "diff.png".
+    for f in os.listdir(output_dir):
+      filepath = os.path.join(output_dir, f)
+      if f.startswith('input-'):
+        given_path = filepath
+      elif f.startswith('closest-'):
+        closest_path = filepath
+      elif f == 'diff.png':
+        diff_path = filepath
+    results = self._comparison_results.setdefault(image_name,
+                                                  self.ComparisonResults())
+    if given_path:
+      with output_manager.ArchivedTempfile('given_%s.png' % image_name,
+                                           'gold_local_diffs',
+                                           Datatype.PNG) as given_file:
+        shutil.move(given_path, given_file.name)
+      results.local_diff_given_image = given_file.Link()
+    if closest_path:
+      with output_manager.ArchivedTempfile('closest_%s.png' % image_name,
+                                           'gold_local_diffs',
+                                           Datatype.PNG) as closest_file:
+        shutil.move(closest_path, closest_file.name)
+      results.local_diff_closest_image = closest_file.Link()
+    if diff_path:
+      with output_manager.ArchivedTempfile('diff_%s.png' % image_name,
+                                           'gold_local_diffs',
+                                           Datatype.PNG) as diff_file:
+        shutil.move(diff_path, diff_file.name)
+      results.local_diff_diff_image = diff_file.Link()
+
+  @staticmethod
+  def _RunCmdForRcAndOutput(cmd):
+    rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(cmd,
+                                                          merge_stderr=True)
+    return rc, stdout
+
+
+class AndroidSkiaGoldSessionManager(
+    skia_gold_session_manager.SkiaGoldSessionManager):
+  @staticmethod
+  def GetSessionClass():
+    return AndroidSkiaGoldSession
+
+
+class AndroidSkiaGoldProperties(skia_gold_properties.SkiaGoldProperties):
+  @staticmethod
+  def _GetGitOriginMasterHeadSha1():
+    return repo_utils.GetGitOriginMasterHeadSHA1(host_paths.DIR_SOURCE_ROOT)
diff --git a/src/build/android/pylib/utils/gold_utils_test.py b/src/build/android/pylib/utils/gold_utils_test.py
new file mode 100755
index 0000000..2d3cc5c
--- /dev/null
+++ b/src/build/android/pylib/utils/gold_utils_test.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for gold_utils."""
+
+#pylint: disable=protected-access
+
+import contextlib
+import os
+import tempfile
+import unittest
+
+from pylib.constants import host_paths
+from pylib.utils import gold_utils
+
+with host_paths.SysPath(host_paths.BUILD_PATH):
+  from skia_gold_common import unittest_utils
+
+import mock  # pylint: disable=import-error
+from pyfakefs import fake_filesystem_unittest  # pylint: disable=import-error
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+def assertArgWith(test, arg_list, arg, value):
+  i = arg_list.index(arg)
+  test.assertEqual(arg_list[i + 1], value)
+
+
+class AndroidSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  @mock.patch.object(gold_utils.AndroidSkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandCommonArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
+    sgp = gold_utils.AndroidSkiaGoldProperties(args)
+    session = gold_utils.AndroidSkiaGoldSession(self._working_dir,
+                                                sgp,
+                                                self._json_keys,
+                                                'corpus',
+                                                instance='instance')
+    session.Diff('name', 'png_file', None)
+    call_args = cmd_mock.call_args[0][0]
+    self.assertIn('diff', call_args)
+    assertArgWith(self, call_args, '--corpus', 'corpus')
+    # TODO(skbug.com/10610): Remove the -public once we go back to using the
+    # non-public instance, or add a second test for testing that the correct
+    # instance is chosen if we decide to support both depending on what the
+    # user is authenticated for.
+    assertArgWith(self, call_args, '--instance', 'instance-public')
+    assertArgWith(self, call_args, '--input', 'png_file')
+    assertArgWith(self, call_args, '--test', 'name')
+    # TODO(skbug.com/10611): Re-add this assert and remove the check for the
+    # absence of the directory once we switch back to using the proper working
+    # directory.
+    # assertArgWith(self, call_args, '--work-dir', self._working_dir)
+    self.assertNotIn(self._working_dir, call_args)
+    i = call_args.index('--out-dir')
+    # The output directory should be a subdirectory of the working directory.
+    self.assertIn(self._working_dir, call_args[i + 1])
+
+
+class AndroidSkiaGoldSessionDiffLinksTest(fake_filesystem_unittest.TestCase):
+  class FakeArchivedFile(object):
+    def __init__(self, path):
+      self.name = path
+
+    def Link(self):
+      return 'file://' + self.name
+
+  class FakeOutputManager(object):
+    def __init__(self):
+      self.output_dir = tempfile.mkdtemp()
+
+    @contextlib.contextmanager
+    def ArchivedTempfile(self, image_name, _, __):
+      filepath = os.path.join(self.output_dir, image_name)
+      yield AndroidSkiaGoldSessionDiffLinksTest.FakeArchivedFile(filepath)
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  def test_outputManagerUsed(self):
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+    sgp = gold_utils.AndroidSkiaGoldProperties(args)
+    session = gold_utils.AndroidSkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    with open(os.path.join(self._working_dir, 'input-inputhash.png'), 'w') as f:
+      f.write('input')
+    with open(os.path.join(self._working_dir, 'closest-closesthash.png'),
+              'w') as f:
+      f.write('closest')
+    with open(os.path.join(self._working_dir, 'diff.png'), 'w') as f:
+      f.write('diff')
+
+    output_manager = AndroidSkiaGoldSessionDiffLinksTest.FakeOutputManager()
+    session._StoreDiffLinks('foo', output_manager, self._working_dir)
+
+    copied_input = os.path.join(output_manager.output_dir, 'given_foo.png')
+    copied_closest = os.path.join(output_manager.output_dir, 'closest_foo.png')
+    copied_diff = os.path.join(output_manager.output_dir, 'diff_foo.png')
+    with open(copied_input) as f:
+      self.assertEqual(f.read(), 'input')
+    with open(copied_closest) as f:
+      self.assertEqual(f.read(), 'closest')
+    with open(copied_diff) as f:
+      self.assertEqual(f.read(), 'diff')
+
+    self.assertEqual(session.GetGivenImageLink('foo'), 'file://' + copied_input)
+    self.assertEqual(session.GetClosestImageLink('foo'),
+                     'file://' + copied_closest)
+    self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + copied_diff)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/utils/google_storage_helper.py b/src/build/android/pylib/utils/google_storage_helper.py
new file mode 100644
index 0000000..d184810
--- /dev/null
+++ b/src/build/android/pylib/utils/google_storage_helper.py
@@ -0,0 +1,126 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to Google Storage.
+
+Text data should be streamed to logdog using the |logdog_helper| module.
+Because logdog has no image or HTML viewer, such data should instead be
+uploaded directly to Google Storage using this module.
+"""
+
+import logging
+import os
+import sys
+import time
+import urlparse
+
+from pylib.constants import host_paths
+from pylib.utils import decorators
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+_GSUTIL_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'third_party', 'catapult',
+    'third_party', 'gsutil', 'gsutil.py')
+_PUBLIC_URL = 'https://storage.googleapis.com/%s/'
+_AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/'
+
+
+@decorators.NoRaiseException(default_return_value='')
+def upload(name, filepath, bucket, gs_args=None, command_args=None,
+           content_type=None, authenticated_link=True):
+  """Uploads data to Google Storage.
+
+  Args:
+    name: Name of the file on Google Storage.
+    filepath: Path to file you want to upload.
+    bucket: Bucket to upload file to.
+    gs_args: Optional list of additional flags to pass to gsutil itself.
+    command_args: Optional list of additional flags to pass to the gsutil
+        cp command.
+    content_type: Content type to upload as. If not specified, Google storage
+        will attempt to infer content type from file extension.
+    authenticated_link: Whether to return a link that requires user to
+        authenticate with a Google account. Setting this to false will return
+        a link that does not require user to be signed into Google account but
+        will only work for completely public storage buckets.
+  Returns:
+    Web link to item uploaded to Google Storage bucket.
+  """
+  bucket = _format_bucket_name(bucket)
+
+  gs_path = 'gs://%s/%s' % (bucket, name)
+  logging.info('Uploading %s to %s', filepath, gs_path)
+
+  cmd = [_GSUTIL_PATH, '-q']
+  cmd.extend(gs_args or [])
+  if content_type:
+    cmd.extend(['-h', 'Content-Type:%s' % content_type])
+  cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path])
+
+  cmd_helper.RunCmd(cmd)
+
+  return get_url_link(name, bucket, authenticated_link)
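+
+# Example usage (illustrative; the bucket name is hypothetical):
+#
+#   link = upload('results/output.html', '/tmp/output.html',
+#                 'my-results-bucket', content_type='text/html')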
+
+
+@decorators.NoRaiseException(default_return_value='')
+def read_from_link(link):
+  # Note that urlparse returns the path with an initial '/', so we only need
+  # to add one more after the 'gs:'.
+  gs_path = 'gs:/%s' % urlparse.urlparse(link).path
+  cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path]
+  return cmd_helper.GetCmdOutput(cmd)
+
+
+@decorators.NoRaiseException(default_return_value=False)
+def exists(name, bucket):
+  bucket = _format_bucket_name(bucket)
+  gs_path = 'gs://%s/%s' % (bucket, name)
+
+  cmd = [_GSUTIL_PATH, '-q', 'stat', gs_path]
+  return_code = cmd_helper.RunCmd(cmd)
+  return return_code == 0
+
+
+# TODO(jbudorick): Delete this function. Only one user of it.
+def unique_name(basename, suffix='', timestamp=True, device=None):
+  """Helper function for creating a unique name for a file to store in GS.
+
+  Args:
+    basename: Base of the unique filename.
+    suffix: Suffix of filename.
+    timestamp: Whether or not to add a timestamp to name.
+    device: Device to add device serial of to name.
+  """
+  return '%s%s%s%s' % (
+      basename,
+      '_%s' % time.strftime('%Y_%m_%d_T%H_%M_%S-UTC', time.gmtime())
+          if timestamp else '',
+      '_%s' % device.serial if device else '',
+      suffix)
+
+
+def get_url_link(name, bucket, authenticated_link=True):
+  """Get url link before/without uploading.
+
+  Args:
+    name: Name of the file on Google Storage.
+    bucket: Bucket to upload file to.
+    authenticated_link: Whether to return a link that requires user to
+        authenticate with a Google account. Setting this to false will return
+        a link that does not require user to be signed into Google account but
+        will only work for completely public storage buckets.
+  Returns:
+    Web link to item to be uploaded to Google Storage bucket
+  """
+  bucket = _format_bucket_name(bucket)
+  url_template = _AUTHENTICATED_URL if authenticated_link else _PUBLIC_URL
+  return os.path.join(url_template % bucket, name)
+
+
+def _format_bucket_name(bucket):
+  if bucket.startswith('gs://'):
+    bucket = bucket[len('gs://'):]
+  if bucket.endswith('/'):
+    bucket = bucket[:-1]
+  return bucket
diff --git a/src/build/android/pylib/utils/instrumentation_tracing.py b/src/build/android/pylib/utils/instrumentation_tracing.py
new file mode 100644
index 0000000..f1d03a0
--- /dev/null
+++ b/src/build/android/pylib/utils/instrumentation_tracing.py
@@ -0,0 +1,204 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions to instrument all Python function calls.
+
+This generates a JSON file readable by Chrome's about:tracing. To use it,
+either call start_instrumenting and stop_instrumenting at the appropriate times,
+or use the Instrument context manager.
+
+A function is only traced if it is from a Python module that matches at least
+one regular expression object in to_include, and does not match any in
+to_exclude. In between the start and stop events, every function call of a
+function from such a module will be added to the trace.
+"""
+
+import contextlib
+import functools
+import inspect
+import os
+import re
+import sys
+import threading
+
+from py_trace_event import trace_event
+
+
+# Modules to exclude by default (to avoid problems like infinite loops)
+DEFAULT_EXCLUDE = [r'py_trace_event\..*']
+
+class _TraceArguments(object):
+  def __init__(self):
+    """Wraps a dictionary to ensure safe evaluation of repr()."""
+    self._arguments = {}
+
+  @staticmethod
+  def _safeStringify(item):
+    try:
+      item_str = repr(item)
+    except Exception: # pylint: disable=broad-except
+      try:
+        item_str = str(item)
+      except Exception: # pylint: disable=broad-except
+        item_str = "<ERROR>"
+    return item_str
+
+  def add(self, key, val):
+    key_str = _TraceArguments._safeStringify(key)
+    val_str = _TraceArguments._safeStringify(val)
+
+    self._arguments[key_str] = val_str
+
+  def __repr__(self):
+    return repr(self._arguments)
+
+
+saved_thread_ids = set()
+
+def _shouldTrace(frame, to_include, to_exclude, included, excluded):
+  """
+  Decides whether or not the function called in frame should be traced.
+
+  Args:
+    frame: The Python frame object of this function call.
+    to_include: Set of regex objects for modules which should be traced.
+    to_exclude: Set of regex objects for modules which should not be traced.
+    included: Set of module names we've determined should be traced.
+    excluded: Set of module names we've determined should not be traced.
+  """
+  if not inspect.getmodule(frame):
+    return False
+
+  module_name = inspect.getmodule(frame).__name__
+
+  if module_name in included:
+    includes = True
+  elif to_include:
+    includes = any([pattern.match(module_name) for pattern in to_include])
+  else:
+    includes = True
+
+  if includes:
+    included.add(module_name)
+  else:
+    return False
+
+  # Find the modules of every function in the stack trace.
+  frames = inspect.getouterframes(frame)
+  calling_module_names = [inspect.getmodule(fr[0]).__name__ for fr in frames]
+
+  # Return False for anything with an excluded module's function anywhere in the
+  # stack trace (even if the function itself is in an included module).
+  if to_exclude:
+    for calling_module in calling_module_names:
+      if calling_module in excluded:
+        return False
+      for pattern in to_exclude:
+        if pattern.match(calling_module):
+          excluded.add(calling_module)
+          return False
+
+  return True
+
+def _generate_trace_function(to_include, to_exclude):
+  to_include = {re.compile(item) for item in to_include}
+  to_exclude = {re.compile(item) for item in to_exclude}
+  to_exclude.update({re.compile(item) for item in DEFAULT_EXCLUDE})
+
+  included = set()
+  excluded = set()
+
+  tracing_pid = os.getpid()
+
+  def traceFunction(frame, event, arg):
+    del arg
+
+    # Don't try to trace in subprocesses.
+    if os.getpid() != tracing_pid:
+      sys.settrace(None)
+      return None
+
+    # pylint: disable=unused-argument
+    if event not in ("call", "return"):
+      return None
+
+    function_name = frame.f_code.co_name
+    filename = frame.f_code.co_filename
+    line_number = frame.f_lineno
+
+    if _shouldTrace(frame, to_include, to_exclude, included, excluded):
+      if event == "call":
+        # This function is beginning; we save the thread name (if that hasn't
+        # been done), record the Begin event, and return this function to be
+        # used as the local trace function.
+
+        thread_id = threading.current_thread().ident
+
+        if thread_id not in saved_thread_ids:
+          thread_name = threading.current_thread().name
+
+          trace_event.trace_set_thread_name(thread_name)
+
+          saved_thread_ids.add(thread_id)
+
+        arguments = _TraceArguments()
+        # The function's argument values are stored in the frame's
+        # |co_varnames| as the first |co_argcount| elements. (Following that
+        # are local variables.)
+        for idx in range(frame.f_code.co_argcount):
+          arg_name = frame.f_code.co_varnames[idx]
+          arguments.add(arg_name, frame.f_locals[arg_name])
+        trace_event.trace_begin(function_name, arguments=arguments,
+                                module=inspect.getmodule(frame).__name__,
+                                filename=filename, line_number=line_number)
+
+        # Return this function, so it gets used as the "local trace function"
+        # within this function's frame (and in particular, gets called for this
+        # function's "return" event).
+        return traceFunction
+
+      if event == "return":
+        trace_event.trace_end(function_name)
+        return None
+
+  return traceFunction
+
+
+def no_tracing(f):
+  @functools.wraps(f)
+  def wrapper(*args, **kwargs):
+    trace_func = sys.gettrace()
+    try:
+      sys.settrace(None)
+      threading.settrace(None)
+      return f(*args, **kwargs)
+    finally:
+      sys.settrace(trace_func)
+      threading.settrace(trace_func)
+  return wrapper
+
+
+def start_instrumenting(output_file, to_include=(), to_exclude=()):
+  """Enable tracing of all function calls (from specified modules)."""
+  trace_event.trace_enable(output_file)
+
+  traceFunc = _generate_trace_function(to_include, to_exclude)
+  sys.settrace(traceFunc)
+  threading.settrace(traceFunc)
+
+
+def stop_instrumenting():
+  trace_event.trace_disable()
+
+  sys.settrace(None)
+  threading.settrace(None)
+
+
+@contextlib.contextmanager
+def Instrument(output_file, to_include=(), to_exclude=()):
+  try:
+    start_instrumenting(output_file, to_include, to_exclude)
+    yield None
+  finally:
+    stop_instrumenting()
diff --git a/src/build/android/pylib/utils/local_utils.py b/src/build/android/pylib/utils/local_utils.py
new file mode 100644
index 0000000..027cca3
--- /dev/null
+++ b/src/build/android/pylib/utils/local_utils.py
@@ -0,0 +1,19 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilities for determining if a test is being run locally or not."""
+
+import os
+
+
+def IsOnSwarming():
+  """Determines whether we are on swarming or not.
+
+  Returns:
+    True if the test is being run on swarming, otherwise False.
+  """
+  # Look for the presence of the SWARMING_SERVER environment variable as a
+  # heuristic to determine whether we're running on a workstation or a bot.
+  # This should always be set on swarming, but would be strange to be set on
+  # a workstation.
+  return 'SWARMING_SERVER' in os.environ
diff --git a/src/build/android/pylib/utils/logdog_helper.py b/src/build/android/pylib/utils/logdog_helper.py
new file mode 100644
index 0000000..68a7ba5
--- /dev/null
+++ b/src/build/android/pylib/utils/logdog_helper.py
@@ -0,0 +1,94 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to logdog."""
+
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import decorators
+
+sys.path.insert(0, os.path.abspath(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client')))
+from libs.logdog import bootstrap # pylint: disable=import-error
+
+
+@decorators.NoRaiseException(default_return_value='',
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def text(name, data, content_type=None):
+  """Uploads text to logdog.
+
+  Args:
+    name: Name of the logdog stream.
+    data: String with data you want to upload.
+    content_type: The optional content type of the stream. If None, a
+      default content type will be chosen.
+
+  Returns:
+    Link to view uploaded text in logdog viewer.
+  """
+  logging.info('Writing text to logdog stream, %s', name)
+  with get_logdog_client().text(name, content_type=content_type) as stream:
+    stream.write(data)
+    return stream.get_viewer_url()
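+
+# Example usage (illustrative; |logcat_contents| is a hypothetical string):
+#
+#   url = text('test_results/logcat', logcat_contents)
+#   logging.info('Logcat uploaded to %s', url)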
+
+
+@decorators.NoRaiseException(default_return_value=None,
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def open_text(name):
+  """Returns a file like object which you can write to.
+
+  Args:
+    name: Name of the logdog stream.
+
+  Returns:
+    A file-like object. Call close() on it when done.
+  """
+  logging.info('Opening text logdog stream, %s', name)
+  return get_logdog_client().open_text(name)
+
+
+@decorators.NoRaiseException(default_return_value='',
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def binary(name, binary_path):
+  """Uploads binary to logdog.
+
+  Args:
+    name: Name of the logdog stream.
+    binary_path: Path to binary you want to upload.
+
+  Returns:
+    Link to view uploaded binary in logdog viewer.
+  """
+  logging.info('Writing binary to logdog stream, %s', name)
+  with get_logdog_client().binary(name) as stream:
+    with open(binary_path, 'rb') as f:
+      stream.write(f.read())
+      return stream.get_viewer_url()
+
+
+@decorators.NoRaiseException(default_return_value='',
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def get_viewer_url(name):
+  """Get Logdog viewer URL.
+
+  Args:
+    name: Name of the logdog stream.
+
+  Returns:
+    Link to view uploaded binary in logdog viewer.
+  """
+  return get_logdog_client().get_viewer_url(name)
+
+
+@decorators.Memoize
+def get_logdog_client():
+  logging.info('Getting logdog client.')
+  return bootstrap.ButlerBootstrap.probe().stream_client()
diff --git a/src/build/android/pylib/utils/logging_utils.py b/src/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 0000000..9c4eae3
--- /dev/null
+++ b/src/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+
+from pylib.constants import host_paths
+
+_COLORAMA_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')
+
+with host_paths.SysPath(_COLORAMA_PATH, position=0):
+  import colorama
+
+BACK = colorama.Back
+FORE = colorama.Fore
+STYLE = colorama.Style
+
+
+class _ColorFormatter(logging.Formatter):
+  # pylint does not see members added dynamically in the constructor.
+  # pylint: disable=no-member
+  color_map = {
+    logging.DEBUG: (FORE.CYAN),
+    logging.WARNING: (FORE.YELLOW),
+    logging.ERROR: (FORE.RED),
+    logging.CRITICAL: (BACK.RED),
+  }
+
+  def __init__(self, wrapped_formatter=None):
+    """Wraps a |logging.Formatter| and adds color."""
+    super(_ColorFormatter, self).__init__()
+    self._wrapped_formatter = wrapped_formatter or logging.Formatter()
+
+  #override
+  def format(self, record):
+    message = self._wrapped_formatter.format(record)
+    return self.Colorize(message, record.levelno)
+
+  def Colorize(self, message, log_level):
+    try:
+      return (''.join(self.color_map[log_level]) + message +
+              colorama.Style.RESET_ALL)
+    except KeyError:
+      return message
+
+
+class ColorStreamHandler(logging.StreamHandler):
+  """Handler that can be used to colorize logging output.
+
+  Example using a specific logger:
+
+    logger = logging.getLogger('my_logger')
+    logger.addHandler(ColorStreamHandler())
+    logger.info('message')
+
+  Example using the root logger:
+
+    ColorStreamHandler.MakeDefault()
+    logging.info('message')
+
+  """
+  def __init__(self, force_color=False):
+    super(ColorStreamHandler, self).__init__()
+    self.force_color = force_color
+    self.setFormatter(logging.Formatter())
+
+  @property
+  def is_tty(self):
+    isatty = getattr(self.stream, 'isatty', None)
+    return isatty and isatty()
+
+  #override
+  def setFormatter(self, formatter):
+    if self.force_color or self.is_tty:
+      formatter = _ColorFormatter(formatter)
+    super(ColorStreamHandler, self).setFormatter(formatter)
+
+  @staticmethod
+  def MakeDefault(force_color=False):
+     """
+     Replaces the default logging handlers with a coloring handler. To use
+     a colorizing handler at the same time as others, either register them
+     after this call, or add the ColorStreamHandler on the logger using
+     Logger.addHandler()
+
+     Args:
+       force_color: Set to True to bypass the tty check and always colorize.
+     """
+     # If the existing handlers aren't removed, messages are duplicated
+     logging.getLogger().handlers = []
+     logging.getLogger().addHandler(ColorStreamHandler(force_color))
+
+
+@contextlib.contextmanager
+def OverrideColor(level, color):
+  """Temporarily override the logging color for a specified level.
+
+  Args:
+    level: logging level whose color gets overridden.
+    color: tuple of formats to apply to log lines.
+  """
+  prev_colors = {}
+  for handler in logging.getLogger().handlers:
+    if isinstance(handler.formatter, _ColorFormatter):
+      prev_colors[handler.formatter] = handler.formatter.color_map[level]
+      handler.formatter.color_map[level] = color
+  try:
+    yield
+  finally:
+    for formatter, prev_color in prev_colors.items():
+      formatter.color_map[level] = prev_color
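+
+# Example usage (illustrative): temporarily render warnings in blue.
+#
+#   with OverrideColor(logging.WARNING, (FORE.BLUE,)):
+#     logging.warning('rendered in blue inside the with-block')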
+
+
+@contextlib.contextmanager
+def SuppressLogging(level=logging.ERROR):
+  """Momentarilly suppress logging events from all loggers.
+
+  TODO(jbudorick): This is not thread safe. Log events from other threads might
+  also inadvertently disappear.
+
+  Example:
+
+    with logging_utils.SuppressLogging():
+      # all but CRITICAL logging messages are suppressed
+      logging.info('just doing some thing') # not shown
+      logging.critical('something really bad happened') # still shown
+
+  Args:
+    level: logging events with this or lower levels are suppressed.
+  """
+  logging.disable(level)
+  try:
+    yield
+  finally:
+    logging.disable(logging.NOTSET)
diff --git a/src/build/android/pylib/utils/maven_downloader.py b/src/build/android/pylib/utils/maven_downloader.py
new file mode 100755
index 0000000..1dc1542
--- /dev/null
+++ b/src/build/android/pylib/utils/maven_downloader.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import logging
+import os
+import shutil
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
+import devil_chromium  # pylint: disable=unused-import
+from devil.utils import cmd_helper
+from devil.utils import parallelizer
+
+
+def _MakeDirsIfAbsent(path):
+  try:
+    os.makedirs(path)
+  except OSError as err:
+    if err.errno != errno.EEXIST or not os.path.isdir(path):
+      raise
+
+
+class MavenDownloader(object):
+  '''
+  Downloads and installs the requested artifacts from the Google Maven repo.
+  The artifacts are expected to be specified in the format
+  "group_id:artifact_id:version:file_type". The file type must be given
+  explicitly: the Maven default is JAR, but most Android libraries are
+  provided as AARs, and downloads would otherwise fail. See Install().
+  '''
+
+  # Remote repository to download the artifacts from. The support library and
+  # Google Play services are only distributed there, but third-party libraries
+  # could use Maven Central or JCenter, for example. The default Maven remote
+  # is Maven Central.
+  _REMOTE_REPO = 'https://maven.google.com'
+
+  # Default Maven repository.
+  _DEFAULT_REPO_PATH = os.path.join(
+      os.path.expanduser('~'), '.m2', 'repository')
+
+  def __init__(self, debug=False):
+    self._repo_path = MavenDownloader._DEFAULT_REPO_PATH
+    self._remote_url = MavenDownloader._REMOTE_REPO
+    self._debug = debug
+
+  def Install(self, target_repo, artifacts, include_poms=False):
+    logging.info('Installing %d artifacts...', len(artifacts))
+    downloaders = [_SingleArtifactDownloader(self, artifact, target_repo)
+                   for artifact in artifacts]
+    if self._debug:
+      for downloader in downloaders:
+        downloader.Run(include_poms)
+    else:
+      parallelizer.SyncParallelizer(downloaders).Run(include_poms)
+    logging.info('%d artifacts installed to %s', len(artifacts), target_repo)
+
+  @property
+  def repo_path(self):
+    return self._repo_path
+
+  @property
+  def remote_url(self):
+    return self._remote_url
+
+  @property
+  def debug(self):
+    return self._debug
+
+
+class _SingleArtifactDownloader(object):
+  '''Handles downloading and installing a single Maven artifact.'''
+
+  _POM_FILE_TYPE = 'pom'
+
+  def __init__(self, download_manager, artifact, target_repo):
+    self._download_manager = download_manager
+    self._artifact = artifact
+    self._target_repo = target_repo
+
+  def Run(self, include_pom=False):
+    parts = self._artifact.split(':')
+    if len(parts) != 4:
+      raise Exception('Artifacts expected as '
+                      '"group_id:artifact_id:version:file_type".')
+    group_id, artifact_id, version, file_type = parts
+    self._InstallArtifact(group_id, artifact_id, version, file_type)
+
+    if include_pom and file_type != _SingleArtifactDownloader._POM_FILE_TYPE:
+      self._InstallArtifact(group_id, artifact_id, version,
+                            _SingleArtifactDownloader._POM_FILE_TYPE)
+
+  def _InstallArtifact(self, group_id, artifact_id, version, file_type):
+    logging.debug('Processing %s', self._artifact)
+
+    download_relpath = self._DownloadArtifact(
+        group_id, artifact_id, version, file_type)
+    logging.debug('Downloaded.')
+
+    install_path = self._ImportArtifact(download_relpath)
+    logging.debug('Installed %s', os.path.relpath(install_path))
+
+  def _DownloadArtifact(self, group_id, artifact_id, version, file_type):
+    '''
+    Downloads the specified artifact using Maven into its standard local
+    repository location; see MavenDownloader._DEFAULT_REPO_PATH.
+    '''
+    cmd = ['mvn',
+           'org.apache.maven.plugins:maven-dependency-plugin:RELEASE:get',
+           '-DremoteRepositories={}'.format(self._download_manager.remote_url),
+           '-Dartifact={}:{}:{}:{}'.format(group_id, artifact_id, version,
+                                           file_type)]
+
+    stdout = None if self._download_manager.debug else open(os.devnull, 'wb')
+
+    try:
+      ret_code = cmd_helper.Call(cmd, stdout=stdout)
+      if ret_code != 0:
+        raise Exception('Command "{}" failed'.format(' '.join(cmd)))
+    except OSError as e:
+      if e.errno == errno.ENOENT:
+        raise Exception('mvn command not found. Please install Maven.')
+      raise
+
+    return os.path.join(os.path.join(*group_id.split('.')),
+                        artifact_id,
+                        version,
+                        '{}-{}.{}'.format(artifact_id, version, file_type))
+
+  def _ImportArtifact(self, artifact_path):
+    # artifact_path names a single file; copy it into the target repo's
+    # mirrored directory structure.
+    src_path = os.path.join(self._download_manager.repo_path, artifact_path)
+    dst_dir = os.path.join(self._target_repo, os.path.dirname(artifact_path))
+
+    _MakeDirsIfAbsent(dst_dir)
+    shutil.copy(src_path, dst_dir)
+
+    return dst_dir
diff --git a/src/build/android/pylib/utils/proguard.py b/src/build/android/pylib/utils/proguard.py
new file mode 100644
index 0000000..9d5bae2
--- /dev/null
+++ b/src/build/android/pylib/utils/proguard.py
@@ -0,0 +1,285 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+
+_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*?  Superclass:\s*([\S]+)$')
+_PROGUARD_SECTION_RE = re.compile(
+    r'^(Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
+    r'\(count = \d+\):$')
+_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+_PROGUARD_ANNOTATION_RE = re.compile(r'^(\s*?)- Annotation \[L(\S*);\]:$')
+_ELEMENT_PRIMITIVE = 0
+_ELEMENT_ARRAY = 1
+_ELEMENT_ANNOTATION = 2
+_PROGUARD_ELEMENT_RES = [
+  (_ELEMENT_PRIMITIVE,
+   re.compile(r'^(\s*?)- Constant element value \[(\S*) .*\]$')),
+  (_ELEMENT_ARRAY,
+   re.compile(r'^(\s*?)- Array element value \[(\S*)\]:$')),
+  (_ELEMENT_ANNOTATION,
+   re.compile(r'^(\s*?)- Annotation element value \[(\S*)\]:$'))
+]
+_PROGUARD_INDENT_WIDTH = 2
+_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'^(\s*?)- \S+? \[(.*)\]$')
+
+
+def _GetProguardPath():
+  return os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'proguard',
+                      'lib', 'proguard603.jar')
+
+
+def Dump(jar_path):
+  """Dumps class and method information from a JAR into a dict via proguard.
+
+  Args:
+    jar_path: An absolute path to the JAR file to dump.
+  Returns:
+    A dict in the following format:
+      {
+        'classes': [
+          {
+            'class': '',
+            'superclass': '',
+            'annotations': {/* dict -- see below */},
+            'methods': [
+              {
+                'method': '',
+                'annotations': {/* dict -- see below */},
+              },
+              ...
+            ],
+          },
+          ...
+        ],
+      }
+
+    Annotations dict format:
+      {
+        'empty-annotation-class-name': None,
+        'annotation-class-name': {
+          'field': 'primitive-value',
+          'field': [ 'array-item-1', 'array-item-2', ... ],
+          'field': {
+            /* Object value */
+            'field': 'primitive-value',
+            'field': [ 'array-item-1', 'array-item-2', ... ],
+            'field': { /* Object value */ }
+          }
+        }
+      }
+
+    Note that for top-level annotations their class names are used for
+    identification, whereas for any nested annotations the corresponding
+    field names are used.
+
+    One drawback of this approach is that an array containing empty
+    annotation classes will be represented as an array of 'None' values,
+    thus it will not be possible to find out annotation class names.
+    On the other hand, storing both annotation class name and the field name
+    would produce a very complex JSON.
+  """
+
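+  # A minimal usage sketch (hypothetical jar path):
+  #   info = Dump('/absolute/path/to/Example.jar')
+  #   for cls in info['classes']:
+  #     print cls['class'], [m['method'] for m in cls['methods']]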
+  with tempfile.NamedTemporaryFile() as proguard_output:
+    status, output = cmd_helper.GetCmdStatusAndOutput([
+        'java',
+        '-jar', _GetProguardPath(),
+        '-injars', jar_path,
+        '-dontshrink', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
+        '-dump', proguard_output.name])
+    if status != 0:
+      raise Exception('Proguard failed on %s:\n%s' % (jar_path, output))
+    return Parse(proguard_output)
+
+class _AnnotationElement(object):
+  def __init__(self, name, ftype, depth):
+    self.ref = None
+    self.name = name
+    self.ftype = ftype
+    self.depth = depth
+
+class _ParseState(object):
+  _INITIAL_VALUES = (lambda: None, list, dict)
+  # Empty annotations are represented as 'None', not as an empty dictionary.
+  _LAZY_INITIAL_VALUES = (lambda: None, list, lambda: None)
+
+  def __init__(self):
+    self._class_result = None
+    self._method_result = None
+    self._parse_annotations = False
+    self._annotation_stack = []
+
+  def ResetPerSection(self, section_name):
+    self.InitMethod(None)
+    self._parse_annotations = (
+      section_name in ['Class file attributes', 'Methods'])
+
+  def ParseAnnotations(self):
+    return self._parse_annotations
+
+  def CreateAndInitClass(self, class_name):
+    self.InitMethod(None)
+    self._class_result = {
+      'class': class_name,
+      'superclass': '',
+      'annotations': {},
+      'methods': [],
+    }
+    return self._class_result
+
+  def HasCurrentClass(self):
+    return bool(self._class_result)
+
+  def SetSuperClass(self, superclass):
+    assert self.HasCurrentClass()
+    self._class_result['superclass'] = superclass
+
+  def InitMethod(self, method_name):
+    self._annotation_stack = []
+    if method_name:
+      self._method_result = {
+        'method': method_name,
+        'annotations': {},
+      }
+      self._class_result['methods'].append(self._method_result)
+    else:
+      self._method_result = None
+
+  def InitAnnotation(self, annotation, depth):
+    if not self._annotation_stack:
+      # Add a fake parent element comprising 'annotations' dictionary,
+      # so we can work uniformly with both top-level and nested annotations.
+      annotations = _AnnotationElement(
+        '<<<top level>>>', _ELEMENT_ANNOTATION, depth - 1)
+      if self._method_result:
+        annotations.ref = self._method_result['annotations']
+      else:
+        annotations.ref = self._class_result['annotations']
+      self._annotation_stack = [annotations]
+    self._BacktrackAnnotationStack(depth)
+    if not self.HasCurrentAnnotation():
+      self._annotation_stack.append(
+        _AnnotationElement(annotation, _ELEMENT_ANNOTATION, depth))
+    self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+  def HasCurrentAnnotation(self):
+    return len(self._annotation_stack) > 1
+
+  def InitAnnotationField(self, field, field_type, depth):
+    self._BacktrackAnnotationStack(depth)
+    # Create the parent representation, if needed. E.g. annotations
+    # are represented with `None`, not with `{}` until they receive the first
+    # field.
+    self._CreateAnnotationPlaceHolder(self._INITIAL_VALUES)
+    if self._annotation_stack[-1].ftype == _ELEMENT_ARRAY:
+      # Nested arrays are not allowed in annotations.
+      assert field_type != _ELEMENT_ARRAY
+      # Use array index instead of bogus field name.
+      field = len(self._annotation_stack[-1].ref)
+    self._annotation_stack.append(_AnnotationElement(field, field_type, depth))
+    self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+  def UpdateCurrentAnnotationFieldValue(self, value, depth):
+    self._BacktrackAnnotationStack(depth)
+    self._InitOrUpdateCurrentField(value)
+
+  def _CreateAnnotationPlaceHolder(self, constructors):
+    assert self.HasCurrentAnnotation()
+    field = self._annotation_stack[-1]
+    if field.ref is None:
+      field.ref = constructors[field.ftype]()
+      self._InitOrUpdateCurrentField(field.ref)
+
+  def _BacktrackAnnotationStack(self, depth):
+    stack = self._annotation_stack
+    while len(stack) > 0 and stack[-1].depth >= depth:
+      stack.pop()
+
+  def _InitOrUpdateCurrentField(self, value):
+    assert self.HasCurrentAnnotation()
+    parent = self._annotation_stack[-2]
+    assert parent.ref is not None
+    # There can be no nested constant element values.
+    assert parent.ftype in [_ELEMENT_ARRAY, _ELEMENT_ANNOTATION]
+    field = self._annotation_stack[-1]
+    if isinstance(value, str) and field.ftype != _ELEMENT_PRIMITIVE:
+      # The value comes from the output parser via
+      # UpdateCurrentAnnotationFieldValue, and should be a value of a constant
+      # element. If it isn't, just skip it.
+      return
+    if parent.ftype == _ELEMENT_ARRAY and field.name >= len(parent.ref):
+      parent.ref.append(value)
+    else:
+      parent.ref[field.name] = value
+
+
+def _GetDepth(prefix):
+  return len(prefix) // _PROGUARD_INDENT_WIDTH
+
+def Parse(proguard_output):
+  results = {
+    'classes': [],
+  }
+
+  state = _ParseState()
+
+  for line in proguard_output:
+    line = line.strip('\r\n')
+
+    m = _PROGUARD_CLASS_RE.match(line)
+    if m:
+      results['classes'].append(
+        state.CreateAndInitClass(m.group(1).replace('/', '.')))
+      continue
+
+    if not state.HasCurrentClass():
+      continue
+
+    m = _PROGUARD_SUPERCLASS_RE.match(line)
+    if m:
+      state.SetSuperClass(m.group(1).replace('/', '.'))
+      continue
+
+    m = _PROGUARD_SECTION_RE.match(line)
+    if m:
+      state.ResetPerSection(m.group(1))
+      continue
+
+    m = _PROGUARD_METHOD_RE.match(line)
+    if m:
+      state.InitMethod(m.group(1))
+      continue
+
+    if not state.ParseAnnotations():
+      continue
+
+    m = _PROGUARD_ANNOTATION_RE.match(line)
+    if m:
+      # Ignore the annotation package.
+      state.InitAnnotation(m.group(2).split('/')[-1], _GetDepth(m.group(1)))
+      continue
+
+    if state.HasCurrentAnnotation():
+      m = None
+      for (element_type, element_re) in _PROGUARD_ELEMENT_RES:
+        m = element_re.match(line)
+        if m:
+          state.InitAnnotationField(
+            m.group(2), element_type, _GetDepth(m.group(1)))
+          break
+      if m:
+        continue
+      m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
+      if m:
+        state.UpdateCurrentAnnotationFieldValue(
+          m.group(2), _GetDepth(m.group(1)))
+      else:
+        state.InitMethod(None)
+
+  return results
diff --git a/src/build/android/pylib/utils/proguard_test.py b/src/build/android/pylib/utils/proguard_test.py
new file mode 100755
index 0000000..b11c299
--- /dev/null
+++ b/src/build/android/pylib/utils/proguard_test.py
@@ -0,0 +1,495 @@
+#!/usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.utils import proguard
+
+class TestParse(unittest.TestCase):
+
+  def setUp(self):
+    self.maxDiff = None
+
+  def testClass(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       '  Superclass: java/lang/Object'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': 'java.lang.Object',
+          'annotations': {},
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethod(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       <init>()V'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': '<init>',
+              'annotations': {}
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassAnnotation(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Annotation [Lorg/example/Annotation;]:',
+       '  - Annotation [Lorg/example/AnnotationWithValue;]:',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+       '    - Constant element value [attr1 \'13\']',
+       '      - Utf8 [val1]',
+       '    - Constant element value [attr2 \'13\']',
+       '      - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'Annotation': None,
+            'AnnotationWithValue': {'attr': 'val'},
+            'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassAnnotationWithArrays(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '  - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'AnnotationWithEmptyArray': {'arrayAttr': []},
+            'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+            'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testNestedClassAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 1):',
+       '  - Annotation [Lorg/example/OuterAnnotation;]:',
+       '    - Constant element value [outerAttr \'13\']',
+       '      - Utf8 [outerVal]',
+       '    - Array element value [outerArr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal2]',
+       '    - Annotation element value [emptyAnn]:',
+       '      - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '    - Annotation element value [ann]:',
+       '      - Annotation [Lorg/example/InnerAnnotation;]:',
+       '        - Constant element value [innerAttr \'13\']',
+       '          - Utf8 [innerVal]',
+       '        - Array element value [innerArr]:',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal1]',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal2]',
+       '        - Annotation element value [emptyInnerAnn]:',
+       '          - Annotation [Lorg/example/EmptyAnnotation;]:'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'OuterAnnotation': {
+              'outerAttr': 'outerVal',
+              'outerArr': ['outerArrVal1', 'outerArrVal2'],
+              'emptyAnn': None,
+              'ann': {
+                'innerAttr': 'innerVal',
+                'innerArr': ['innerArrVal1', 'innerArrVal2'],
+                'emptyInnerAnn': None
+              }
+            }
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassArraysOfAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 1):',
+       '   - Annotation [Lorg/example/OuterAnnotation;]:',
+       '     - Array element value [arrayWithEmptyAnnotations]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '     - Array element value [outerArray]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/InnerAnnotation;]:',
+       '           - Constant element value [innerAttr \'115\']',
+       '             - Utf8 [innerVal]',
+       '           - Array element value [arguments]:',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg1Attr \'115\']',
+       '                   - Utf8 [arg1Val]',
+       '                 - Array element value [arg1Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [11]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [12]',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg2Attr \'115\']',
+       '                   - Utf8 [arg2Val]',
+       '                 - Array element value [arg2Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [21]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [22]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'OuterAnnotation': {
+              'arrayWithEmptyAnnotations': [None, None],
+              'outerArray': [
+                {
+                  'innerAttr': 'innerVal',
+                  'arguments': [
+                    {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+                    {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+                  ]
+                }
+              ]
+            }
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testReadFullClassFileAttributes(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Source file attribute:',
+       '    - Utf8 [Class.java]',
+       '  - Runtime visible annotations attribute:',
+       '    - Annotation [Lorg/example/IntValueAnnotation;]:',
+       '      - Constant element value [value \'73\']',
+       '        - Integer [19]',
+       '  - Inner classes attribute (count = 1)',
+       '    - InnerClassesInfo:',
+       '      Access flags:  0x9 = public static',
+       '      - Class [org/example/Class1]',
+       '      - Class [org/example/Class2]',
+       '      - Utf8 [OnPageFinishedHelper]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'IntValueAnnotation': {
+              'value': '19',
+            }
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodAnnotation(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/Annotation;]:',
+       '  - Annotation [Lorg/example/AnnotationWithValue;]:',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+       '    - Constant element value [attr1 \'13\']',
+       '      - Utf8 [val1]',
+       '    - Constant element value [attr2 \'13\']',
+       '      - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'Annotation': None,
+                'AnnotationWithValue': {'attr': 'val'},
+                'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodAnnotationWithArrays(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '  - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'AnnotationWithEmptyArray': {'arrayAttr': []},
+                'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+                'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodAnnotationWithPrimitivesAndArrays(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/AnnotationPrimitiveThenArray;]:',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationArrayThenPrimitive;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationTwoArrays;]:',
+       '    - Array element value [arrayAttr1]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val1]',
+       '    - Array element value [arrayAttr2]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'AnnotationPrimitiveThenArray': {'attr': 'val',
+                                                 'arrayAttr': ['val']},
+                'AnnotationArrayThenPrimitive': {'arrayAttr': ['val'],
+                                                 'attr': 'val'},
+                'AnnotationTwoArrays': {'arrayAttr1': ['val1'],
+                                        'arrayAttr2': ['val2']}
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testNestedMethodAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/OuterAnnotation;]:',
+       '    - Constant element value [outerAttr \'13\']',
+       '      - Utf8 [outerVal]',
+       '    - Array element value [outerArr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal2]',
+       '    - Annotation element value [emptyAnn]:',
+       '      - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '    - Annotation element value [ann]:',
+       '      - Annotation [Lorg/example/InnerAnnotation;]:',
+       '        - Constant element value [innerAttr \'13\']',
+       '          - Utf8 [innerVal]',
+       '        - Array element value [innerArr]:',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal1]',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal2]',
+       '        - Annotation element value [emptyInnerAnn]:',
+       '          - Annotation [Lorg/example/EmptyAnnotation;]:'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'OuterAnnotation': {
+                  'outerAttr': 'outerVal',
+                  'outerArr': ['outerArrVal1', 'outerArrVal2'],
+                  'emptyAnn': None,
+                  'ann': {
+                    'innerAttr': 'innerVal',
+                    'innerArr': ['innerArrVal1', 'innerArrVal2'],
+                    'emptyInnerAnn': None
+                  }
+                }
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodArraysOfAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '   - Annotation [Lorg/example/OuterAnnotation;]:',
+       '     - Array element value [arrayWithEmptyAnnotations]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '     - Array element value [outerArray]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/InnerAnnotation;]:',
+       '           - Constant element value [innerAttr \'115\']',
+       '             - Utf8 [innerVal]',
+       '           - Array element value [arguments]:',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg1Attr \'115\']',
+       '                   - Utf8 [arg1Val]',
+       '                 - Array element value [arg1Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [11]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [12]',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg2Attr \'115\']',
+       '                   - Utf8 [arg2Val]',
+       '                 - Array element value [arg2Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [21]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [22]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'OuterAnnotation': {
+                  'arrayWithEmptyAnnotations': [None, None],
+                  'outerArray': [
+                    {
+                      'innerAttr': 'innerVal',
+                      'arguments': [
+                        {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+                        {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+                      ]
+                    }
+                  ]
+                }
+              }
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/repo_utils.py b/src/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000..f9d300a
--- /dev/null
+++ b/src/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from devil.utils import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+  """Returns the git hash tag for the given directory.
+
+  Args:
+    in_directory: The directory where git is to be run.
+  """
+  command_line = ['git', 'log', '-1', '--pretty=format:%H']
+  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+  return output[0:40]
+
+
+def GetGitOriginMasterHeadSHA1(in_directory):
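+  """Returns the SHA-1 hash of origin/master for the given directory."""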
+  command_line = ['git', 'rev-parse', 'origin/master']
+  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+  return output.strip()
diff --git a/src/build/android/pylib/utils/shared_preference_utils.py b/src/build/android/pylib/utils/shared_preference_utils.py
new file mode 100644
index 0000000..ae0d31b
--- /dev/null
+++ b/src/build/android/pylib/utils/shared_preference_utils.py
@@ -0,0 +1,95 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for modifying an app's settings file using JSON."""
+
+import json
+import logging
+
+
+def UnicodeToStr(data):
+  """Recursively converts any Unicode to Python strings.
+
+  Args:
+    data: The data to be converted.
+
+  Returns:
+    A copy of the given data, but with instances of Unicode converted to Python
+    strings.
+  """
+  if isinstance(data, dict):
+    return {UnicodeToStr(key): UnicodeToStr(value)
+            for key, value in data.iteritems()}
+  elif isinstance(data, list):
+    return [UnicodeToStr(element) for element in data]
+  elif isinstance(data, unicode):
+    return data.encode('utf-8')
+  return data
+
+
+def ExtractSettingsFromJson(filepath):
+  """Extracts the settings data from the given JSON file.
+
+  Args:
+    filepath: The path to the JSON file to read.
+
+  Returns:
+    The data read from the JSON file with strings converted to Python strings.
+  """
+  # json.load() loads strings as unicode, which causes issues when trying
+  # to edit string values in preference files, so convert to Python strings
+  with open(filepath) as prefs_file:
+    return UnicodeToStr(json.load(prefs_file))
+
+
+def ApplySharedPreferenceSetting(shared_pref, setting):
+  """Applies the given settings to an app's shared preferences file.
+
+  Modifies an installed app's settings by editing its shared preference file
+  through the given SharedPrefs object. The provided settings data must be a
+  dictionary in the following format:
+  {
+    "package": "com.example.package",
+    "filename": "AppSettingsFile.xml",
+    "supports_encrypted_path": true,
+    "set": {
+      "SomeBoolToSet": true,
+      "SomeStringToSet": "StringValue",
+    },
+    "remove": [
+      "list",
+      "of",
+      "keys",
+      "to",
+      "remove",
+    ]
+  }
+
+  Example JSON files that can be read with ExtractSettingsFromJson and passed to
+  this function are in //chrome/android/shared_preference_files/test/.
+
+  Args:
+    shared_pref: The devil SharedPrefs object for the device the settings will
+        be applied to.
+    setting: A settings dictionary to apply.
+  """
+  shared_pref.Load()
+  for key in setting.get('remove', []):
+    try:
+      shared_pref.Remove(key)
+    except KeyError:
+      logging.warning("Attempted to remove non-existent key %s", key)
+  for key, value in setting.get('set', {}).iteritems():
+    if isinstance(value, bool):
+      shared_pref.SetBoolean(key, value)
+    elif isinstance(value, basestring):
+      shared_pref.SetString(key, value)
+    elif isinstance(value, (int, long)):
+      shared_pref.SetLong(key, value)
+    elif isinstance(value, list):
+      shared_pref.SetStringSet(key, value)
+    else:
+      raise ValueError("Given invalid value type %s for key %s" % (
+          str(type(value)), key))
+  shared_pref.Commit()
diff --git a/src/build/android/pylib/utils/simpleperf.py b/src/build/android/pylib/utils/simpleperf.py
new file mode 100644
index 0000000..b3ba00e
--- /dev/null
+++ b/src/build/android/pylib/utils/simpleperf.py
@@ -0,0 +1,260 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from devil import devil_env
+from devil.android import device_signal
+from devil.android.sdk import version_codes
+from pylib import constants
+
+
+def _ProcessType(proc):
+  _, _, suffix = proc.name.partition(':')
+  if not suffix:
+    return 'browser'
+  if suffix.startswith('sandboxed_process'):
+    return 'renderer'
+  if suffix.startswith('privileged_process'):
+    return 'gpu'
+  return None
+
+
+def _GetSpecifiedPID(device, package_name, process_specifier):
+  if process_specifier is None:
+    return None
+
+  # Check for numeric PID
+  try:
+    pid = int(process_specifier)
+    return pid
+  except ValueError:
+    pass
+
+  # Check for exact process name; can be any of these formats:
+  #   <package>:<process name>, i.e. 'org.chromium.chrome:sandboxed_process0'
+  #   :<process name>, i.e. ':sandboxed_process0'
+  #   <process name>, i.e. 'sandboxed_process0'
+  full_process_name = process_specifier
+  if process_specifier.startswith(':'):
+    full_process_name = package_name + process_specifier
+  elif ':' not in process_specifier:
+    full_process_name = '%s:%s' % (package_name, process_specifier)
+  matching_processes = device.ListProcesses(full_process_name)
+  if len(matching_processes) == 1:
+    return matching_processes[0].pid
+  if len(matching_processes) > 1:
+    raise RuntimeError('Found %d processes with name "%s".' % (
+        len(matching_processes), process_specifier))
+
+  # Check for process type (i.e. 'renderer')
+  package_processes = device.ListProcesses(package_name)
+  matching_processes = [p for p in package_processes if (
+      _ProcessType(p) == process_specifier)]
+  if process_specifier == 'renderer' and len(matching_processes) > 1:
+    raise RuntimeError('Found %d renderer processes; please re-run with only '
+                       'one open tab.' % len(matching_processes))
+  if len(matching_processes) != 1:
+    raise RuntimeError('Found %d processes of type "%s".' % (
+        len(matching_processes), process_specifier))
+  return matching_processes[0].pid
+
+
+def _ThreadsForProcess(device, pid):
+  # The thread list output format for 'ps' is the same regardless of version.
+  # Here's the column headers, and a sample line for a thread belonging to
+  # pid 12345 (note that the last few columns are not aligned with headers):
+  #
+  # USER        PID   TID  PPID     VSZ    RSS WCHAN            ADDR S CMD
+  # u0_i101   12345 24680   567 1357902  97531 futex_wait_queue_me e85acd9c S \
+  #     CrRendererMain
+  if device.build_version_sdk >= version_codes.OREO:
+    pid_regex = (
+        r'^[[:graph:]]\{1,\}[[:blank:]]\{1,\}%d[[:blank:]]\{1,\}' % pid)
+    ps_cmd = "ps -T -e | grep '%s'" % pid_regex
+    ps_output_lines = device.RunShellCommand(
+        ps_cmd, shell=True, check_return=True)
+  else:
+    ps_cmd = ['ps', '-p', str(pid), '-t']
+    ps_output_lines = device.RunShellCommand(ps_cmd, check_return=True)
+  result = []
+  for l in ps_output_lines:
+    fields = l.split()
+    # fields[2] is tid, fields[-1] is thread name. Output may include an entry
+    # for the process itself with tid=pid; omit that one.
+    if fields[2] == str(pid):
+      continue
+    result.append((int(fields[2]), fields[-1]))
+  return result
+
+
+def _ThreadType(thread_name):
+  if not thread_name:
+    return 'unknown'
+  if (thread_name.startswith('Chrome_ChildIO') or
+      thread_name.startswith('Chrome_IO')):
+    return 'io'
+  if thread_name.startswith('Compositor'):
+    return 'compositor'
+  if (thread_name.startswith('ChildProcessMai') or
+      thread_name.startswith('CrGpuMain') or
+      thread_name.startswith('CrRendererMain')):
+    return 'main'
+  if thread_name.startswith('RenderThread'):
+    return 'render'
+  # Unrecognized thread names are not classified.
+  return None
+
+
+def _GetSpecifiedTID(device, pid, thread_specifier):
+  if thread_specifier is None:
+    return None
+
+  # Check for numeric TID
+  try:
+    tid = int(thread_specifier)
+    return tid
+  except ValueError:
+    pass
+
+  # Check for thread type
+  if pid is not None:
+    matching_threads = [t for t in _ThreadsForProcess(device, pid) if (
+        _ThreadType(t[1]) == thread_specifier)]
+    if len(matching_threads) != 1:
+      raise RuntimeError('Found %d threads of type "%s".' % (
+          len(matching_threads), thread_specifier))
+    return matching_threads[0][0]
+
+  return None
+
+
+def PrepareDevice(device):
+  if device.build_version_sdk < version_codes.NOUGAT:
+    raise RuntimeError('Simpleperf profiling is only supported on Android N '
+                       'and later.')
+
+  # Necessary for profiling
+  # https://android-review.googlesource.com/c/platform/system/sepolicy/+/234400
+  device.SetProp('security.perf_harden', '0')
+
+
+def InstallSimpleperf(device, package_name):
+  package_arch = device.GetPackageArchitecture(package_name) or 'armeabi-v7a'
+  host_simpleperf_path = devil_env.config.LocalPath('simpleperf', package_arch)
+  if not host_simpleperf_path:
+    raise Exception('Could not get path to simpleperf executable on host.')
+  device_simpleperf_path = '/'.join(
+      ('/data/local/tmp/profilers', package_arch, 'simpleperf'))
+  device.PushChangedFiles([(host_simpleperf_path, device_simpleperf_path)])
+  return device_simpleperf_path
+
+
+@contextlib.contextmanager
+def RunSimpleperf(device, device_simpleperf_path, package_name,
+                  process_specifier, thread_specifier, profiler_args,
+                  host_out_path):
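+  """Runs simpleperf `record` on the device for the duration of the context.
+
+  On a clean exit the profile is pulled from the device to host_out_path.
+  A hedged usage sketch (workload call is illustrative only):
+
+    path = InstallSimpleperf(device, 'org.chromium.chrome')
+    with RunSimpleperf(device, path, 'org.chromium.chrome', 'renderer',
+                       'main', [], '/tmp/perf.data'):
+      RunBenchmarkOnDevice()  # hypothetical workload to profile
+  """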
+  pid = _GetSpecifiedPID(device, package_name, process_specifier)
+  tid = _GetSpecifiedTID(device, pid, thread_specifier)
+  if pid is None and tid is None:
+    raise RuntimeError('Could not find specified process/thread running on '
+                       'device. Make sure the apk is already running before '
+                       'attempting to profile.')
+  profiler_args = list(profiler_args)
+  if profiler_args and profiler_args[0] == 'record':
+    profiler_args.pop(0)
+  if '--call-graph' not in profiler_args and '-g' not in profiler_args:
+    profiler_args.append('-g')
+  if '-f' not in profiler_args:
+    profiler_args.extend(('-f', '1000'))
+  device_out_path = '/data/local/tmp/perf.data'
+  if '-o' in profiler_args:
+    device_out_path = profiler_args[profiler_args.index('-o') + 1]
+  else:
+    profiler_args.extend(('-o', device_out_path))
+
+  if tid:
+    profiler_args.extend(('-t', str(tid)))
+  else:
+    profiler_args.extend(('-p', str(pid)))
+
+  adb_shell_simpleperf_process = device.adb.StartShell(
+      [device_simpleperf_path, 'record'] + profiler_args)
+
+  completed = False
+  try:
+    yield
+    completed = True
+
+  finally:
+    device.KillAll('simpleperf', signum=device_signal.SIGINT, blocking=True,
+                   quiet=True)
+    if completed:
+      adb_shell_simpleperf_process.wait()
+      device.PullFile(device_out_path, host_out_path)
+
+
+def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory,
+                             pprof_out_path):
+  # The simpleperf scripts require the unstripped libs to be installed in the
+  # same directory structure as the libs on the device. Much of the logic here
+  # is just figuring out and creating the necessary directory structure, and
+  # symlinking the unstripped shared libs.
+
+  # Get the set of libs that we can symbolize
+  unstripped_lib_dir = os.path.join(build_directory, 'lib.unstripped')
+  unstripped_libs = set(
+      f for f in os.listdir(unstripped_lib_dir) if f.endswith('.so'))
+
+  # report.py will show the directory structure above the shared libs;
+  # that is the directory structure we need to recreate on the host.
+  script_dir = devil_env.config.LocalPath('simpleperf_scripts')
+  report_path = os.path.join(script_dir, 'report.py')
+  report_cmd = [sys.executable, report_path, '-i', simpleperf_out_path]
+  device_lib_path = None
+  for line in subprocess.check_output(
+      report_cmd, stderr=subprocess.STDOUT).splitlines():
+    fields = line.split()
+    if len(fields) < 5:
+      continue
+    shlib_path = fields[4]
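+    # str.rpartition('/') yields (dirname, '/', basename); the [::2] slice
+    # keeps (dirname, basename).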
+    shlib_dirname, shlib_basename = shlib_path.rpartition('/')[::2]
+    if shlib_basename in unstripped_libs:
+      device_lib_path = shlib_dirname
+      break
+  if not device_lib_path:
+    raise RuntimeError('No chrome-related symbols in profiling data in %s. '
+                       'Either the process was idle for the entire profiling '
+                       'period, or something went very wrong (and you should '
+                       'file a bug at crbug.com/new with component '
+                       'Speed>Tracing, and assign it to szager@chromium.org).'
+                       % simpleperf_out_path)
+
+  # Recreate the directory structure locally, and symlink unstripped libs.
+  processing_dir = tempfile.mkdtemp()
+  try:
+    processing_lib_dir = os.path.join(
+        processing_dir, 'binary_cache', device_lib_path.lstrip('/'))
+    os.makedirs(processing_lib_dir)
+    for lib in unstripped_libs:
+      unstripped_lib_path = os.path.join(unstripped_lib_dir, lib)
+      processing_lib_path = os.path.join(processing_lib_dir, lib)
+      os.symlink(unstripped_lib_path, processing_lib_path)
+
+    # Run the script to annotate symbols and convert from simpleperf format to
+    # pprof format.
+    pprof_converter_script = os.path.join(
+        script_dir, 'pprof_proto_generator.py')
+    pprof_converter_cmd = [
+        sys.executable, pprof_converter_script, '-i', simpleperf_out_path, '-o',
+        os.path.abspath(pprof_out_path), '--ndk_path',
+        constants.ANDROID_NDK_ROOT
+    ]
+    subprocess.check_output(pprof_converter_cmd, stderr=subprocess.STDOUT,
+                            cwd=processing_dir)
+  finally:
+    shutil.rmtree(processing_dir, ignore_errors=True)
diff --git a/src/build/android/pylib/utils/test_filter.py b/src/build/android/pylib/utils/test_filter.py
new file mode 100644
index 0000000..6db6243
--- /dev/null
+++ b/src/build/android/pylib/utils/test_filter.py
@@ -0,0 +1,141 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+
+_CMDLINE_NAME_SEGMENT_RE = re.compile(
+    r' with(?:out)? \{[^\}]*\}')
+
+class ConflictingPositiveFiltersException(Exception):
+  """Raised when both filter file and filter argument have positive filters."""
+
+
+def ParseFilterFile(input_lines):
+  """Converts test filter file contents to positive and negative pattern lists.
+
+  See //testing/buildbot/filters/README.md for description of the
+  syntax that |input_lines| are expected to follow.
+
+  See
+  https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md#running-a-subset-of-the-tests
+  for description of the syntax that --gtest_filter argument should follow.
+
+  Args:
+    input_lines: An iterable (e.g. a list or a file) containing input lines.
+  Returns:
+    A tuple (positive_patterns, negative_patterns) of pattern lists.
+  """
+  # Strip comments and whitespace from each line and filter non-empty lines.
+  stripped_lines = (l.split('#', 1)[0].strip() for l in input_lines)
+  filter_lines = [l for l in stripped_lines if l]
+
+  # Split the tests into positive and negative patterns (gtest treats
+  # every pattern after the first '-' sign as an exclusion).
+  positive_patterns = [l for l in filter_lines if l[0] != '-']
+  negative_patterns = [l[1:] for l in filter_lines if l[0] == '-']
+  return positive_patterns, negative_patterns
+
+
+def AddFilterOptions(parser):
+  """Adds filter command-line options to the provided parser.
+
+  Args:
+    parser: an argparse.ArgumentParser instance.
+  """
+  parser.add_argument(
+      # Deprecated argument.
+      '--gtest-filter-file',
+      # New argument.
+      '--test-launcher-filter-file',
+      dest='test_filter_file',
+      help='Path to file that contains googletest-style filter strings. '
+      'See also //testing/buildbot/filters/README.md.')
+
+  filter_group = parser.add_mutually_exclusive_group()
+  filter_group.add_argument(
+      '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
+      dest='test_filter',
+      help='googletest-style filter string.',
+      default=os.environ.get('GTEST_FILTER'))
+  filter_group.add_argument(
+      '--isolated-script-test-filter',
+      help='isolated script filter string. '
+           'Like gtest filter strings, but with :: separators instead of :')
+
+
+def AppendPatternsToFilter(test_filter, positive_patterns=None,
+                           negative_patterns=None):
+  """Returns a test-filter string with additional patterns.
+
+  Args:
+    test_filter: test filter string
+    positive_patterns: list of positive patterns to add to string
+    negative_patterns: list of negative patterns to add to string
+  """
+  positives = []
+  negatives = []
+  positive = ''
+  negative = ''
+
+  split_filter = test_filter.split('-', 1)
+  if len(split_filter) == 1:
+    positive = split_filter[0]
+  else:
+    positive, negative = split_filter
+
+  positives += [f for f in positive.split(':') if f]
+  negatives += [f for f in negative.split(':') if f]
+
+  positives += positive_patterns if positive_patterns else []
+  negatives += negative_patterns if negative_patterns else []
+
+  final_filter = ':'.join([p.replace('#', '.') for p in positives])
+  if negatives:
+    final_filter += '-' + ':'.join([n.replace('#', '.') for n in negatives])
+  return final_filter
+
+
+def HasPositivePatterns(test_filter):
+  """Returns True if test_filter contains a positive pattern, else False.
+
+  Args:
+    test_filter: test-filter style string
+  """
+  return bool(test_filter) and test_filter[0] != '-'
+
+
+def InitializeFilterFromArgs(args):
+  """Returns a filter string from the command-line option values.
+
+  Args:
+    args: an argparse.Namespace instance resulting from using a parser to
+      which the filter options above were added (see AddFilterOptions).
+
+  Raises:
+    ConflictingPositiveFiltersException if both filter file and command line
+    specify positive filters.
+  """
+  test_filter = ''
+  if args.isolated_script_test_filter:
+    args.test_filter = args.isolated_script_test_filter.replace('::', ':')
+  if args.test_filter:
+    test_filter = _CMDLINE_NAME_SEGMENT_RE.sub(
+        '', args.test_filter.replace('#', '.'))
+
+  if args.test_filter_file:
+    for test_filter_file in args.test_filter_file.split(';'):
+      with open(test_filter_file, 'r') as f:
+        positive_file_patterns, negative_file_patterns = ParseFilterFile(f)
+        if positive_file_patterns and HasPositivePatterns(test_filter):
+          raise ConflictingPositiveFiltersException(
+              'Cannot specify positive pattern in both filter file and ' +
+              'filter command line argument')
+        test_filter = AppendPatternsToFilter(
+            test_filter,
+            positive_patterns=positive_file_patterns,
+            negative_patterns=negative_file_patterns)
+
+  return test_filter
diff --git a/src/build/android/pylib/utils/test_filter_test.py b/src/build/android/pylib/utils/test_filter_test.py
new file mode 100755
index 0000000..1ae5a7e
--- /dev/null
+++ b/src/build/android/pylib/utils/test_filter_test.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import sys
+import tempfile
+import unittest
+
+from pylib.utils import test_filter
+
+class ParseFilterFileTest(unittest.TestCase):
+
+  def testParseFilterFile_commentsAndBlankLines(self):
+    input_lines = [
+      'positive1',
+      '# comment',
+      'positive2  # Another comment',
+      '',
+      'positive3'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = ['positive1', 'positive2', 'positive3'], []
+    self.assertEquals(expected, actual)
+
+  def testParseFilterFile_onlyPositive(self):
+    input_lines = [
+      'positive1',
+      'positive2'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = ['positive1', 'positive2'], []
+    self.assertEquals(expected, actual)
+
+  def testParseFilterFile_onlyNegative(self):
+    input_lines = [
+      '-negative1',
+      '-negative2'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = [], ['negative1', 'negative2']
+    self.assertEquals(expected, actual)
+
+  def testParseFilterFile_positiveAndNegative(self):
+    input_lines = [
+      'positive1',
+      'positive2',
+      '-negative1',
+      '-negative2'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = ['positive1', 'positive2'], ['negative1', 'negative2']
+    self.assertEquals(expected, actual)
+
+
+class InitializeFilterFromArgsTest(unittest.TestCase):
+
+  def testInitializeBasicFilter(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    args = parser.parse_args([
+        '--test-filter',
+        'FooTest.testFoo:BarTest.testBar'])
+    expected = 'FooTest.testFoo:BarTest.testBar'
+    actual = test_filter.InitializeFilterFromArgs(args)
+    self.assertEquals(actual, expected)
+
+  def testInitializeJavaStyleFilter(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    args = parser.parse_args([
+        '--test-filter',
+        'FooTest#testFoo:BarTest#testBar'])
+    expected = 'FooTest.testFoo:BarTest.testBar'
+    actual = test_filter.InitializeFilterFromArgs(args)
+    self.assertEquals(actual, expected)
+
+  def testInitializeBasicIsolatedScript(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    args = parser.parse_args([
+        '--isolated-script-test-filter',
+        'FooTest.testFoo::BarTest.testBar'])
+    expected = 'FooTest.testFoo:BarTest.testBar'
+    actual = test_filter.InitializeFilterFromArgs(args)
+    self.assertEquals(actual, expected)
+
+  def testFilterArgWithPositiveFilterInFilterFile(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('positive1\npositive2\n-negative2\n-negative3\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter=-negative1',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      expected = 'positive1:positive2-negative1:negative2:negative3'
+      actual = test_filter.InitializeFilterFromArgs(args)
+      self.assertEquals(actual, expected)
+
+  def testFilterFileWithPositiveFilterInFilterArg(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('-negative2\n-negative3\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter',
+          'positive1:positive2-negative1',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      expected = 'positive1:positive2-negative1:negative2:negative3'
+      actual = test_filter.InitializeFilterFromArgs(args)
+      self.assertEquals(actual, expected)
+
+  def testPositiveFilterInBothFileAndArg(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('positive1\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter',
+          'positive2',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      with self.assertRaises(test_filter.ConflictingPositiveFiltersException):
+        test_filter.InitializeFilterFromArgs(args)
+
+  def testFilterArgWithFilterFileAllNegative(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('-negative3\n-negative4\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter=-negative1:negative2',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      expected = '-negative1:negative2:negative3:negative4'
+      actual = test_filter.InitializeFilterFromArgs(args)
+      self.assertEquals(actual, expected)
+
+
+class AppendPatternsToFilter(unittest.TestCase):
+  def testAllEmpty(self):
+    expected = ''
+    actual = test_filter.AppendPatternsToFilter('', [], [])
+    self.assertEquals(actual, expected)
+  def testAppendOnlyPositiveToEmptyFilter(self):
+    expected = 'positive'
+    actual = test_filter.AppendPatternsToFilter('', ['positive'])
+    self.assertEquals(actual, expected)
+  def testAppendOnlyNegativeToEmptyFilter(self):
+    expected = '-negative'
+    actual = test_filter.AppendPatternsToFilter('',
+                                                negative_patterns=['negative'])
+    self.assertEquals(actual, expected)
+  def testAppendToEmptyFilter(self):
+    expected = 'positive-negative'
+    actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative'])
+    self.assertEquals(actual, expected)
+  def testAppendToPositiveOnlyFilter(self):
+    expected = 'positive1:positive2-negative'
+    actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'],
+                                                ['negative'])
+    self.assertEquals(actual, expected)
+  def testAppendToNegativeOnlyFilter(self):
+    expected = 'positive-negative1:negative2'
+    actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'],
+                                                ['negative2'])
+    self.assertEquals(actual, expected)
+  def testAppendPositiveToFilter(self):
+    expected = 'positive1:positive2-negative1'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                ['positive2'])
+    self.assertEquals(actual, expected)
+  def testAppendNegativeToFilter(self):
+    expected = 'positive1-negative1:negative2'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                negative_patterns=['negative2'])
+    self.assertEquals(actual, expected)
+  def testAppendBothToFilter(self):
+    expected = 'positive1:positive2-negative1:negative2'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                positive_patterns=['positive2'],
+                                                negative_patterns=['negative2'])
+    self.assertEquals(actual, expected)
+  def testAppendMultipleToFilter(self):
+    expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                ['positive2', 'positive3'],
+                                                ['negative2', 'negative3'])
+    self.assertEquals(actual, expected)
+  def testRepeatedAppendToFilter(self):
+    expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+    filter_string = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                       ['positive2'],
+                                                       ['negative2'])
+    actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'],
+                                                ['negative3'])
+    self.assertEquals(actual, expected)
+  def testAppendHashSeparatedPatternsToFilter(self):
+    expected = 'positive.test1:positive.test2-negative.test1:negative.test2'
+    actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1',
+                                                       ['positive#test2'],
+                                                       ['negative#test2'])
+    self.assertEquals(actual, expected)
+
+
+class HasPositivePatterns(unittest.TestCase):
+  def testEmpty(self):
+    expected = False
+    actual = test_filter.HasPositivePatterns('')
+    self.assertEquals(actual, expected)
+  def testHasOnlyPositive(self):
+    expected = True
+    actual = test_filter.HasPositivePatterns('positive')
+    self.assertEquals(actual, expected)
+  def testHasOnlyNegative(self):
+    expected = False
+    actual = test_filter.HasPositivePatterns('-negative')
+    self.assertEquals(actual, expected)
+  def testHasBoth(self):
+    expected = True
+    actual = test_filter.HasPositivePatterns('positive-negative')
+    self.assertEquals(actual, expected)
+
+
+if __name__ == '__main__':
+  sys.exit(unittest.main())
diff --git a/src/build/android/pylib/utils/time_profile.py b/src/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000..094799c
--- /dev/null
+++ b/src/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+  """Class for simple profiling of action, with logging of cost."""
+
+  def __init__(self, description='operation'):
+    self._starttime = None
+    self._endtime = None
+    self._description = description
+    self.Start()
+
+  def Start(self):
+    self._starttime = time.time()
+    self._endtime = None
+
+  def GetDelta(self):
+    """Returns the rounded delta.
+
+    Also stops the timer if Stop() has not already been called.
+    """
+    if self._endtime is None:
+      self.Stop(log=False)
+    delta = self._endtime - self._starttime
+    delta = round(delta, 2) if delta < 10 else round(delta, 1)
+    return delta
+
+  def LogResult(self):
+    """Logs the result."""
+    logging.info('%s seconds to perform %s', self.GetDelta(), self._description)
+
+  def Stop(self, log=True):
+    """Stop profiling.
+
+    Args:
+      log: Log the delta (defaults to true).
+    """
+    self._endtime = time.time()
+    if log:
+      self.LogResult()
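+
+
+# Illustrative usage sketch (not part of the original module); the
+# description string is arbitrary:
+#
+#   profile = TimeProfile('dex compile')
+#   ... do the work being measured ...
+#   profile.Stop()  # Logs "<delta> seconds to perform dex compile".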
diff --git a/src/build/android/pylib/utils/xvfb.py b/src/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000..cb9d50e
--- /dev/null
+++ b/src/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+  """Return True if on Linux; else False."""
+  return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
+
+  def __init__(self):
+    self._pid = 0
+
+  def Start(self):
+    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
+
+    Copied from tools/code_coverage/coverage_posix.py
+    """
+    if not _IsLinux():
+      return
+    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+                             '-ac'],
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    self._pid = proc.pid
+    if not self._pid:
+      raise Exception('Could not start Xvfb')
+    os.environ['DISPLAY'] = ':9'
+
+    # Now confirm, giving a chance for it to start if needed.
+    for _ in range(10):
+      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+      _, retcode = os.waitpid(proc.pid, 0)
+      if retcode == 0:
+        break
+      time.sleep(0.25)
+    if retcode != 0:
+      raise Exception('Could not confirm Xvfb happiness')
+
+  def Stop(self):
+    """Stop Xvfb if needed.  Linux only."""
+    if self._pid:
+      try:
+        os.kill(self._pid, signal.SIGKILL)
+      except:
+        pass
+      del os.environ['DISPLAY']
+      self._pid = 0
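+
+
+# Illustrative usage sketch (not part of the original module):
+#
+#   xvfb = Xvfb()
+#   xvfb.Start()  # No-op on non-Linux hosts.
+#   try:
+#     ... run code that needs a display ...
+#   finally:
+#     xvfb.Stop()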
diff --git a/src/build/android/pylib/valgrind_tools.py b/src/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000..4689dc3
--- /dev/null
+++ b/src/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,115 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=R0201
+
+from __future__ import print_function
+
+import logging
+import sys
+
+from devil.android import device_errors
+from devil.android.valgrind_tools import base_tool
+
+
+def SetChromeTimeoutScale(device, scale):
+  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+  path = '/data/local/tmp/chrome_timeout_scale'
+  if not scale or scale == 1.0:
+    # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+    device.RemovePath(path, force=True, as_root=True)
+  else:
+    device.WriteFile(path, '%f' % scale, as_root=True)
+
+
+class AddressSanitizerTool(base_tool.BaseTool):
+  """AddressSanitizer tool."""
+
+  WRAPPER_NAME = '/system/bin/asanwrapper'
+  # Disable memcmp overlap check. There are blobs (GL drivers)
+  # on some android devices that use memcmp on overlapping regions,
+  # nothing we can do about that.
+  EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
+
+  def __init__(self, device):
+    super(AddressSanitizerTool, self).__init__()
+    self._device = device
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies ASan tools to the device."""
+    del device
+
+  def GetTestWrapper(self):
+    return AddressSanitizerTool.WRAPPER_NAME
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper for utilities, such as forwarder.
+
+    AddressSanitizer wrapper must be added to all instrumented binaries,
+    including forwarder and the like. This can be removed if such binaries
+    were built without instrumentation.
+    """
+    return self.GetTestWrapper()
+
+  def SetupEnvironment(self):
+    try:
+      self._device.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      # Try to set the timeout scale anyway.
+      # TODO(jbudorick) Handle this exception appropriately after interface
+      #                 conversions are finished.
+      logging.error(str(e))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    SetChromeTimeoutScale(self._device, None)
+
+  def GetTimeoutScale(self):
+    # Very slow startup.
+    return 20.0
+
+
+
+TOOL_REGISTRY = {
+    'asan': AddressSanitizerTool,
+}
+
+
+def CreateTool(tool_name, device):
+  """Creates a tool with the specified tool name.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  Returns:
+    A tool for the specified tool_name.
+  """
+  if not tool_name:
+    return base_tool.BaseTool()
+
+  ctor = TOOL_REGISTRY.get(tool_name)
+  if ctor:
+    return ctor(device)
+  else:
+    print('Unknown tool %s, available tools: %s' % (tool_name, ', '.join(
+        sorted(TOOL_REGISTRY.keys()))))
+    sys.exit(1)
+
+
+def PushFilesForTool(tool_name, device):
+  """Pushes the files required for |tool_name| to |device|.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  """
+  if not tool_name:
+    return
+
+  clazz = TOOL_REGISTRY.get(tool_name)
+  if clazz:
+    clazz.CopyFiles(device)
+  else:
+    print('Unknown tool %s, available tools: %s' % (tool_name, ', '.join(
+        sorted(TOOL_REGISTRY.keys()))))
+    sys.exit(1)
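+
+
+# Illustrative usage sketch (not part of the original module); |device| is
+# assumed to be a devil DeviceUtils instance:
+#
+#   tool = CreateTool('asan', device)
+#   tool.SetupEnvironment()
+#   try:
+#     ... run binaries through tool.GetTestWrapper() ...
+#   finally:
+#     tool.CleanUpEnvironment()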
diff --git a/src/build/android/pylintrc b/src/build/android/pylintrc
new file mode 100644
index 0000000..2a721bf
--- /dev/null
+++ b/src/build/android/pylintrc
@@ -0,0 +1,15 @@
+[FORMAT]
+
+max-line-length=80
+
+[MESSAGES CONTROL]
+
+disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position
+
+[REPORTS]
+
+reports=no
+
+[VARIABLES]
+
+dummy-variables-rgx=^_.*$|dummy
diff --git a/src/build/android/resource_sizes.gni b/src/build/android/resource_sizes.gni
new file mode 100644
index 0000000..2c91749
--- /dev/null
+++ b/src/build/android/resource_sizes.gni
@@ -0,0 +1,100 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/internal_rules.gni")
+
+# Generates a script in the bin directory that runs
+# //build/android/resource_sizes.py against the provided apk.
+#
+# Exactly one of apk_name or file_path should be provided.
+#
+# Variables:
+#   apk_name: The name of the apk, without the extension.
+#   file_path: The path to the apk or .minimal.apks.
+#   trichrome_chrome_path: The path to chrome apk or .minimal.apks.
+#   trichrome_webview_path: The path to webview apk or .minimal.apks.
+#   trichrome_library_path: The path to library apk or .minimal.apks.
+template("android_resource_sizes_test") {
+  generate_android_wrapper(target_name) {
+    forward_variables_from(invoker, [ "data_deps" ])
+    executable = "//build/android/resource_sizes.py"
+    wrapper_script = "$root_out_dir/bin/run_${target_name}"
+
+    assert(defined(invoker.apk_name) != defined(invoker.file_path),
+           "Exactly one of apk_name or file_path should be provided.")
+
+    deps = [ "//build/android:resource_sizes_py" ]
+    executable_args = [
+      "--output-format",
+      "histograms",
+      "--chromium-output-directory",
+      "@WrappedPath(.)",
+    ]
+
+    data = [
+      "//.vpython",
+      "//.vpython3",
+    ]
+    if (defined(invoker.trichrome_chrome_path)) {
+      data += [
+        invoker.trichrome_chrome_path,
+        invoker.trichrome_webview_path,
+        invoker.trichrome_library_path,
+      ]
+      _rebased_chrome =
+          rebase_path(invoker.trichrome_chrome_path, root_build_dir)
+      _rebased_webview =
+          rebase_path(invoker.trichrome_webview_path, root_build_dir)
+      _rebased_library =
+          rebase_path(invoker.trichrome_library_path, root_build_dir)
+
+      # apk_name used only as test suite name. Not a path in this case.
+      executable_args += [
+        "--trichrome-chrome",
+        "@WrappedPath(${_rebased_chrome})",
+        "--trichrome-webview",
+        "@WrappedPath(${_rebased_webview})",
+        "--trichrome-library",
+        "@WrappedPath(${_rebased_library})",
+        "${invoker.apk_name}",
+      ]
+    } else {
+      if (defined(invoker.apk_name)) {
+        _file_path = "$root_out_dir/apks/${invoker.apk_name}.apk"
+        data += [ "$root_out_dir/arsc/apks/${invoker.apk_name}.ap_" ]
+      } else if (defined(invoker.file_path)) {
+        _file_path = invoker.file_path
+      }
+      data += [ _file_path ]
+      _rebased_file_path = rebase_path(_file_path, root_build_dir)
+      executable_args += [ "@WrappedPath(${_rebased_file_path})" ]
+    }
+  }
+}
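+
+# Illustrative invocation (not part of the original file); the target and
+# apk names here are hypothetical:
+#
+#   android_resource_sizes_test("resource_sizes_chrome_public_apk") {
+#     apk_name = "ChromePublic"
+#     data_deps = [ ":chrome_public_apk" ]
+#   }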
+
+# Generates a "size config JSON file" to specify data to be passed from recipes
+# to Python scripts for binary size measurement on bots. All filenames are
+# relative to $root_build_dir. The resulting JSON file is written to
+# "$root_build_dir/config/${invoker.name}_size_config.json".
+#
+# Variables:
+#   name: Name used to construct the output JSON file's path.
+#   mapping_files: List of mapping files.
+#   to_resource_sizes_py: Scope containing data to pass to resource_sizes.py,
+#     processed by generate_commit_size_analysis.py.
+#   supersize_input_file: Main input for SuperSize.
+template("android_size_bot_config") {
+  _full_target_name = get_label_info(target_name, "label_no_toolchain")
+  _out_json = {
+    _HEADER = "Written by build target '${_full_target_name}'"
+    forward_variables_from(invoker,
+                           [
+                             "mapping_files",
+                             "to_resource_sizes_py",
+                             "supersize_input_file",
+                           ])
+  }
+  _output_json_path = "$root_build_dir/config/${invoker.name}_size_config.json"
+  write_file(_output_json_path, _out_json, "json")
+}
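+
+# Illustrative invocation (not part of the original file); the name and
+# file paths here are hypothetical:
+#
+#   android_size_bot_config("monochrome_size_config") {
+#     name = "monochrome"
+#     mapping_files = [ "apks/Monochrome.apk.mapping" ]
+#     supersize_input_file = "apks/Monochrome.minimal.apks"
+#   }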
diff --git a/src/build/android/resource_sizes.py b/src/build/android/resource_sizes.py
new file mode 100755
index 0000000..c592970
--- /dev/null
+++ b/src/build/android/resource_sizes.py
@@ -0,0 +1,910 @@
+#!/usr/bin/env vpython
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Reports binary size metrics for an APK.
+
+More information at //docs/speed/binary_size/metrics.md.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+from contextlib import contextmanager
+import json
+import logging
+import os
+import posixpath
+import re
+import struct
+import sys
+import tempfile
+import zipfile
+import zlib
+
+import devil_chromium
+from devil.android.sdk import build_tools
+from devil.utils import cmd_helper
+from devil.utils import lazy
+import method_count
+from pylib import constants
+from pylib.constants import host_paths
+
+_AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
+_BUILD_UTILS_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp')
+
+with host_paths.SysPath(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build')):
+  import gn_helpers  # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import perf_tests_results_helper  # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+  from tracing.value import convert_chart_json  # pylint: disable=import-error
+
+with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
+  from util import build_utils  # pylint: disable=import-error
+  from util import zipalign  # pylint: disable=import-error
+
+
+zipalign.ApplyZipFileZipAlignFix()
+
+# Captures an entire config from aapt output.
+_AAPT_CONFIG_PATTERN = r'config %s:(.*?)config [a-zA-Z-]+:'
+# Matches string resource entries from aapt output.
+_AAPT_ENTRY_RE = re.compile(
+    r'resource (?P<id>\w{10}) [\w\.]+:string/.*?"(?P<val>.+?)"', re.DOTALL)
+_BASE_CHART = {
+    'format_version': '0.1',
+    'benchmark_name': 'resource_sizes',
+    'benchmark_description': 'APK resource size information.',
+    'trace_rerun_options': [],
+    'charts': {}
+}
+# Macro definitions look like (something, 123) when
+# enable_resource_allowlist_generation=true.
+_RC_HEADER_RE = re.compile(r'^#define (?P<name>\w+).* (?P<id>\d+)\)?$')
+_RE_NON_LANGUAGE_PAK = re.compile(r'^assets/.*(resources|percent)\.pak$')
+_READELF_SIZES_METRICS = {
+    'text': ['.text'],
+    'data': ['.data', '.rodata', '.data.rel.ro', '.data.rel.ro.local'],
+    'relocations': ['.rel.dyn', '.rel.plt', '.rela.dyn', '.rela.plt'],
+    'unwind': [
+        '.ARM.extab', '.ARM.exidx', '.eh_frame', '.eh_frame_hdr',
+        '.ARM.exidxsentinel_section_after_text'
+    ],
+    'symbols': [
+        '.dynsym', '.dynstr', '.dynamic', '.shstrtab', '.got', '.plt',
+        '.got.plt', '.hash', '.gnu.hash'
+    ],
+    'other': [
+        '.init_array', '.preinit_array', '.ctors', '.fini_array', '.comment',
+        '.note.gnu.gold-version', '.note.crashpad.info', '.note.android.ident',
+        '.ARM.attributes', '.note.gnu.build-id', '.gnu.version',
+        '.gnu.version_d', '.gnu.version_r', '.interp', '.gcc_except_table'
+    ]
+}
+
+
+class _AccumulatingReporter(object):
+  def __init__(self):
+    self._combined_metrics = collections.defaultdict(int)
+
+  def __call__(self, graph_title, trace_title, value, units):
+    self._combined_metrics[(graph_title, trace_title, units)] += value
+
+  def DumpReports(self, report_func):
+    for (graph_title, trace_title,
+         units), value in sorted(self._combined_metrics.iteritems()):
+      report_func(graph_title, trace_title, value, units)
+
+
+class _ChartJsonReporter(_AccumulatingReporter):
+  def __init__(self, chartjson):
+    super(_ChartJsonReporter, self).__init__()
+    self._chartjson = chartjson
+    self.trace_title_prefix = ''
+
+  def __call__(self, graph_title, trace_title, value, units):
+    super(_ChartJsonReporter, self).__call__(graph_title, trace_title, value,
+                                             units)
+
+    perf_tests_results_helper.ReportPerfResult(
+        self._chartjson, graph_title, self.trace_title_prefix + trace_title,
+        value, units)
+
+  def SynthesizeTotals(self, unique_method_count):
+    for tup, value in sorted(self._combined_metrics.iteritems()):
+      graph_title, trace_title, units = tup
+      if trace_title == 'unique methods':
+        value = unique_method_count
+      perf_tests_results_helper.ReportPerfResult(self._chartjson, graph_title,
+                                                 'Combined_' + trace_title,
+                                                 value, units)
+
+
+def _PercentageDifference(a, b):
+  if a == 0:
+    return 0
+  return float(b - a) / a
+
+
+def _ReadZipInfoExtraFieldLength(zip_file, zip_info):
+  """Reads the value of |extraLength| from |zip_info|'s local file header.
+
+  |zip_info| has an |extra| field, but it's read from the central directory.
+  Android's zipalign tool sets the extra field only in local file headers.
+  """
+  # Refer to https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers
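+  # In the local file header, the 2-byte "extra field length" is at offset 28
+  # from the start of the header.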
+  zip_file.fp.seek(zip_info.header_offset + 28)
+  return struct.unpack('<H', zip_file.fp.read(2))[0]
+
+
+def _MeasureApkSignatureBlock(zip_file):
+  """Measures the size of the v2 / v3 signing block.
+
+  Refer to: https://source.android.com/security/apksigning/v2
+  """
+  # Seek to "end of central directory" struct.
+  eocd_offset_from_end = -22 - len(zip_file.comment)
+  zip_file.fp.seek(eocd_offset_from_end, os.SEEK_END)
+  assert zip_file.fp.read(4) == b'PK\005\006', (
+      'failed to find end-of-central-directory')
+
+  # Read out the "start of central directory" offset.
+  zip_file.fp.seek(eocd_offset_from_end + 16, os.SEEK_END)
+  start_of_central_directory = struct.unpack('<I', zip_file.fp.read(4))[0]
+
+  # Compute the offset after the last zip entry.
+  last_info = max(zip_file.infolist(), key=lambda i: i.header_offset)
+  last_header_size = (30 + len(last_info.filename) +
+                      _ReadZipInfoExtraFieldLength(zip_file, last_info))
+  end_of_last_file = (last_info.header_offset + last_header_size +
+                      last_info.compress_size)
+  return start_of_central_directory - end_of_last_file
+
+
+def _RunReadelf(so_path, options, tool_prefix=''):
+  return cmd_helper.GetCmdOutput(
+      [tool_prefix + 'readelf'] + options + [so_path])
+
+
+def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
+  with Unzip(apk_path, filename=lib_path) as extracted_lib_path:
+    grouped_section_sizes = collections.defaultdict(int)
+    no_bits_section_sizes, section_sizes = _CreateSectionNameSizeMap(
+        extracted_lib_path, tool_prefix)
+    for group_name, section_names in _READELF_SIZES_METRICS.iteritems():
+      for section_name in section_names:
+        if section_name in section_sizes:
+          grouped_section_sizes[group_name] += section_sizes.pop(section_name)
+
+    # Consider all NOBITS sections as .bss.
+    grouped_section_sizes['bss'] = sum(
+        v for v in no_bits_section_sizes.itervalues())
+
+    # Group any unknown section headers into the "other" group.
+    for section_header, section_size in section_sizes.iteritems():
+      sys.stderr.write('Unknown elf section header: %s\n' % section_header)
+      grouped_section_sizes['other'] += section_size
+
+    return grouped_section_sizes
+
+
+def _CreateSectionNameSizeMap(so_path, tool_prefix):
+  stdout = _RunReadelf(so_path, ['-S', '--wide'], tool_prefix)
+  section_sizes = {}
+  no_bits_section_sizes = {}
+  # Matches  [ 2] .hash HASH 00000000006681f0 0001f0 003154 04   A  3   0  8
+  for match in re.finditer(r'\[[\s\d]+\] (\..*)$', stdout, re.MULTILINE):
+    items = match.group(1).split()
+    target = no_bits_section_sizes if items[1] == 'NOBITS' else section_sizes
+    target[items[0]] = int(items[4], 16)
+
+  return no_bits_section_sizes, section_sizes
+
+
+def _ParseManifestAttributes(apk_path):
+  # Check if the manifest specifies whether or not to extract native libs.
+  output = cmd_helper.GetCmdOutput([
+      _AAPT_PATH.read(), 'd', 'xmltree', apk_path, 'AndroidManifest.xml'])
+
+  def parse_attr(name):
+    # android:extractNativeLibs(0x010104ea)=(type 0x12)0x0
+    # android:extractNativeLibs(0x010104ea)=(type 0x12)0xffffffff
+    # dist:onDemand=(type 0x12)0xffffffff
+    m = re.search(name + r'(?:\(.*?\))?=\(type .*?\)(\w+)', output)
+    return m and int(m.group(1), 16)
+
+  skip_extract_lib = bool(parse_attr('android:extractNativeLibs'))
+  sdk_version = parse_attr('android:minSdkVersion')
+  is_feature_split = parse_attr('android:isFeatureSplit')
+  # Can use <dist:on-demand>, or <module dist:onDemand="true">.
+  on_demand = parse_attr('dist:onDemand') or 'dist:on-demand' in output
+  on_demand = bool(on_demand and is_feature_split)
+
+  return sdk_version, skip_extract_lib, on_demand
+
+
+def _NormalizeLanguagePaks(translations, factor):
+  english_pak = translations.FindByPattern(r'.*/en[-_][Uu][Ss]\.l?pak')
+  num_translations = translations.GetNumEntries()
+  ret = 0
+  if english_pak:
+    ret -= translations.ComputeZippedSize()
+    ret += int(english_pak.compress_size * num_translations * factor)
+  return ret
+
+
+def _NormalizeResourcesArsc(apk_path, num_arsc_files, num_translations,
+                            out_dir):
+  """Estimates the expected overhead of untranslated strings in resources.arsc.
+
+  See http://crbug.com/677966 for why this is necessary.
+  """
+  # If there are multiple .arsc files, use the resource packaged APK instead.
+  if num_arsc_files > 1:
+    if not out_dir:
+      return -float('inf')
+    ap_name = os.path.basename(apk_path).replace('.apk', '.ap_')
+    ap_path = os.path.join(out_dir, 'arsc/apks', ap_name)
+    if not os.path.exists(ap_path):
+      raise Exception('Missing expected file: %s, try rebuilding.' % ap_path)
+    apk_path = ap_path
+
+  aapt_output = _RunAaptDumpResources(apk_path)
+  # en-rUS is in the default config and may be cluttered with non-translatable
+  # strings, so en-rGB is a better baseline for finding missing translations.
+  en_strings = _CreateResourceIdValueMap(aapt_output, 'en-rGB')
+  fr_strings = _CreateResourceIdValueMap(aapt_output, 'fr')
+
+  # en-US and en-GB will never be translated.
+  config_count = num_translations - 2
+
+  size = 0
+  for res_id, string_val in en_strings.iteritems():
+    if string_val == fr_strings[res_id]:
+      string_size = len(string_val)
+      # 7 bytes is the per-entry overhead (not specific to any string). See
+      # https://android.googlesource.com/platform/frameworks/base.git/+/android-4.2.2_r1/tools/aapt/StringPool.cpp#414.
+      # The 1.5 factor was determined experimentally and is meant to account for
+      # other languages generally having longer strings than English.
+      size += config_count * (7 + string_size * 1.5)
+
+  return int(size)
+
+
+def _CreateResourceIdValueMap(aapt_output, lang):
+  """Return a map of resource ids to string values for the given |lang|."""
+  config_re = _AAPT_CONFIG_PATTERN % lang
+  return {entry.group('id'): entry.group('val')
+          for config_section in re.finditer(config_re, aapt_output, re.DOTALL)
+          for entry in re.finditer(_AAPT_ENTRY_RE, config_section.group(0))}
+
+
+def _RunAaptDumpResources(apk_path):
+  cmd = [_AAPT_PATH.read(), 'dump', '--values', 'resources', apk_path]
+  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+  if status != 0:
+    raise Exception('Failed running aapt command: "%s" with output "%s".' %
+                    (' '.join(cmd), output))
+  return output
+
+
+class _FileGroup(object):
+  """Represents a category that apk files can fall into."""
+
+  def __init__(self, name):
+    self.name = name
+    self._zip_infos = []
+    self._extracted_multipliers = []
+
+  def AddZipInfo(self, zip_info, extracted_multiplier=0):
+    self._zip_infos.append(zip_info)
+    self._extracted_multipliers.append(extracted_multiplier)
+
+  def AllEntries(self):
+    return iter(self._zip_infos)
+
+  def GetNumEntries(self):
+    return len(self._zip_infos)
+
+  def FindByPattern(self, pattern):
+    return next((i for i in self._zip_infos if re.match(pattern, i.filename)),
+                None)
+
+  def FindLargest(self):
+    if not self._zip_infos:
+      return None
+    return max(self._zip_infos, key=lambda i: i.file_size)
+
+  def ComputeZippedSize(self):
+    return sum(i.compress_size for i in self._zip_infos)
+
+  def ComputeUncompressedSize(self):
+    return sum(i.file_size for i in self._zip_infos)
+
+  def ComputeExtractedSize(self):
+    ret = 0
+    for zi, multiplier in zip(self._zip_infos, self._extracted_multipliers):
+      ret += zi.file_size * multiplier
+    return ret
+
+  def ComputeInstallSize(self):
+    return self.ComputeExtractedSize() + self.ComputeZippedSize()
+
+
+def _AnalyzeInternal(apk_path,
+                     sdk_version,
+                     report_func,
+                     dex_stats_collector,
+                     out_dir,
+                     tool_prefix,
+                     apks_path=None,
+                     split_name=None):
+  """Analyse APK to determine size contributions of different file classes.
+
+  Returns: Normalized APK size.
+  """
+  dex_stats_collector.CollectFromZip(split_name or '', apk_path)
+  file_groups = []
+
+  def make_group(name):
+    group = _FileGroup(name)
+    file_groups.append(group)
+    return group
+
+  def has_no_extension(filename):
+    return os.path.splitext(filename)[1] == ''
+
+  native_code = make_group('Native code')
+  java_code = make_group('Java code')
+  native_resources_no_translations = make_group('Native resources (no l10n)')
+  translations = make_group('Native resources (l10n)')
+  stored_translations = make_group('Native resources stored (l10n)')
+  icu_data = make_group('ICU (i18n library) data')
+  v8_snapshots = make_group('V8 Snapshots')
+  png_drawables = make_group('PNG drawables')
+  res_directory = make_group('Non-compiled Android resources')
+  arsc = make_group('Compiled Android resources')
+  metadata = make_group('Package metadata')
+  unknown = make_group('Unknown files')
+  notices = make_group('licenses.notice file')
+  unwind_cfi = make_group('unwind_cfi (dev and canary only)')
+
+  with zipfile.ZipFile(apk_path, 'r') as apk:
+    apk_contents = apk.infolist()
+    # Account for zipalign overhead that exists in local file header.
+    zipalign_overhead = sum(
+        _ReadZipInfoExtraFieldLength(apk, i) for i in apk_contents)
+    # Account for zipalign overhead that exists in central directory header.
+    # Happens when python aligns entries in apkbuilder.py, but does not
+    # exist when using Android's zipalign. E.g. for bundle .apks files.
+    zipalign_overhead += sum(len(i.extra) for i in apk_contents)
+    signing_block_size = _MeasureApkSignatureBlock(apk)
+
+  _, skip_extract_lib, _ = _ParseManifestAttributes(apk_path)
+
+  # Pre-L: Dalvik - .odex file is simply decompressed/optimized dex file (~1x).
+  # L, M: ART - .odex file is compiled version of the dex file (~4x).
+  # N: ART - Uses Dalvik-like JIT for normal apps (~1x), full compilation for
+  #    shared apps (~4x).
+  # Actual multipliers calculated using "apk_operations.py disk-usage".
+  # Will need to update multipliers once apk obfuscation is enabled.
+  # E.g. with obfuscation, the 4.04 changes to 4.46.
+  speed_profile_dex_multiplier = 1.17
+  orig_filename = apks_path or apk_path
+  is_webview = 'WebView' in orig_filename
+  is_monochrome = 'Monochrome' in orig_filename
+  is_library = 'Library' in orig_filename
+  is_shared_apk = sdk_version >= 24 and (is_monochrome or is_webview
+                                         or is_library)
+  # Dex decompression overhead varies by Android version.
+  if sdk_version < 21:
+    # JellyBean & KitKat
+    dex_multiplier = 1.16
+  elif sdk_version < 24:
+    # Lollipop & Marshmallow
+    dex_multiplier = 4.04
+  elif is_shared_apk:
+    # Oreo and above, compilation_filter=speed
+    dex_multiplier = 4.04
+  else:
+    # Oreo and above, compilation_filter=speed-profile
+    dex_multiplier = speed_profile_dex_multiplier
+
+  total_apk_size = os.path.getsize(apk_path)
+  for member in apk_contents:
+    filename = member.filename
+    if filename.endswith('/'):
+      continue
+    if filename.endswith('.so'):
+      basename = posixpath.basename(filename)
+      should_extract_lib = not skip_extract_lib and basename.startswith('lib')
+      native_code.AddZipInfo(
+          member, extracted_multiplier=int(should_extract_lib))
+    elif filename.endswith('.dex'):
+      java_code.AddZipInfo(member, extracted_multiplier=dex_multiplier)
+    elif re.search(_RE_NON_LANGUAGE_PAK, filename):
+      native_resources_no_translations.AddZipInfo(member)
+    elif filename.endswith('.pak') or filename.endswith('.lpak'):
+      compressed = member.compress_type != zipfile.ZIP_STORED
+      bucket = translations if compressed else stored_translations
+      extracted_multiplier = 0
+      if compressed:
+        extracted_multiplier = int('en_' in filename or 'en-' in filename)
+      bucket.AddZipInfo(member, extracted_multiplier=extracted_multiplier)
+    elif 'icu' in filename and filename.endswith('.dat'):
+      icu_data.AddZipInfo(member)
+    elif filename.endswith('.bin'):
+      v8_snapshots.AddZipInfo(member)
+    elif filename.startswith('res/'):
+      if (filename.endswith('.png') or filename.endswith('.webp')
+          or has_no_extension(filename)):
+        png_drawables.AddZipInfo(member)
+      else:
+        res_directory.AddZipInfo(member)
+    elif filename.endswith('.arsc'):
+      arsc.AddZipInfo(member)
+    elif filename.startswith('META-INF') or filename in (
+        'AndroidManifest.xml', 'assets/webapk_dex_version.txt'):
+      metadata.AddZipInfo(member)
+    elif filename.endswith('.notice'):
+      notices.AddZipInfo(member)
+    elif filename.startswith('assets/unwind_cfi'):
+      unwind_cfi.AddZipInfo(member)
+    else:
+      unknown.AddZipInfo(member)
+
+  if apks_path:
+    # We're mostly focused on size of Chrome for non-English locales, so assume
+    # Hindi (arbitrarily chosen) locale split is installed.
+    with zipfile.ZipFile(apks_path) as z:
+      subpath = 'splits/{}-hi.apk'.format(split_name)
+      if subpath in z.namelist():
+        hindi_apk_info = z.getinfo(subpath)
+        total_apk_size += hindi_apk_info.file_size
+      else:
+        assert split_name != 'base', 'splits/base-hi.apk should always exist'
+
+  total_install_size = total_apk_size
+  total_install_size_android_go = total_apk_size
+  zip_overhead = total_apk_size
+
+  for group in file_groups:
+    actual_size = group.ComputeZippedSize()
+    install_size = group.ComputeInstallSize()
+    uncompressed_size = group.ComputeUncompressedSize()
+    extracted_size = group.ComputeExtractedSize()
+    total_install_size += extracted_size
+    zip_overhead -= actual_size
+
+    report_func('Breakdown', group.name + ' size', actual_size, 'bytes')
+    report_func('InstallBreakdown', group.name + ' size', int(install_size),
+                'bytes')
+    # Only a few metrics are compressed in the first place.
+    # To avoid over-reporting, track uncompressed size only for compressed
+    # entries.
+    if uncompressed_size != actual_size:
+      report_func('Uncompressed', group.name + ' size', uncompressed_size,
+                  'bytes')
+
+    if group is java_code and is_shared_apk:
+      # Updates are compiled using quicken, but system image uses speed-profile.
+      extracted_size = int(uncompressed_size * speed_profile_dex_multiplier)
+      total_install_size_android_go += extracted_size
+      report_func('InstallBreakdownGo', group.name + ' size',
+                  actual_size + extracted_size, 'bytes')
+    elif group is translations and apks_path:
+      # Assume Hindi rather than English (accounted for above in total_apk_size)
+      total_install_size_android_go += actual_size
+    else:
+      total_install_size_android_go += extracted_size
+
+  # Per-file zip overhead is caused by:
+  # * 30 byte entry header + len(file name)
+  # * 46 byte central directory entry + len(file name)
+  # * 0-3 bytes for zipalign.
+  report_func('Breakdown', 'Zip Overhead', zip_overhead, 'bytes')
+  report_func('InstallSize', 'APK size', total_apk_size, 'bytes')
+  report_func('InstallSize', 'Estimated installed size',
+              int(total_install_size), 'bytes')
+  if is_shared_apk:
+    report_func('InstallSize', 'Estimated installed size (Android Go)',
+                int(total_install_size_android_go), 'bytes')
+  transfer_size = _CalculateCompressedSize(apk_path)
+  report_func('TransferSize', 'Transfer size (deflate)', transfer_size, 'bytes')
+
+  # Size of main dex vs remaining.
+  main_dex_info = java_code.FindByPattern('classes.dex')
+  if main_dex_info:
+    main_dex_size = main_dex_info.file_size
+    report_func('Specifics', 'main dex size', main_dex_size, 'bytes')
+    secondary_size = java_code.ComputeUncompressedSize() - main_dex_size
+    report_func('Specifics', 'secondary dex size', secondary_size, 'bytes')
+
+  main_lib_info = native_code.FindLargest()
+  native_code_unaligned_size = 0
+  for lib_info in native_code.AllEntries():
+    section_sizes = _ExtractLibSectionSizesFromApk(apk_path, lib_info.filename,
+                                                   tool_prefix)
+    native_code_unaligned_size += sum(
+        v for k, v in section_sizes.iteritems() if k != 'bss')
+    # Size of main .so vs remaining.
+    if lib_info == main_lib_info:
+      main_lib_size = lib_info.file_size
+      report_func('Specifics', 'main lib size', main_lib_size, 'bytes')
+      secondary_size = native_code.ComputeUncompressedSize() - main_lib_size
+      report_func('Specifics', 'other lib size', secondary_size, 'bytes')
+
+      for metric_name, size in section_sizes.iteritems():
+        report_func('MainLibInfo', metric_name, size, 'bytes')
+
+  # Main metric that we want to monitor for jumps.
+  normalized_apk_size = total_apk_size
+  # unwind_cfi exists only in dev, canary, and non-channel builds.
+  normalized_apk_size -= unwind_cfi.ComputeZippedSize()
+  # Sections within .so files get 4kb aligned, so use section sizes rather than
+  # file size. Also gets rid of compression.
+  normalized_apk_size -= native_code.ComputeZippedSize()
+  normalized_apk_size += native_code_unaligned_size
+  # Normalized dex size: Size within the zip + size on disk for Android Go
+  # devices running Android O (which ~= uncompressed dex size).
+  # Use a constant compression factor to account for fluctuations.
+  normalized_apk_size -= java_code.ComputeZippedSize()
+  normalized_apk_size += java_code.ComputeUncompressedSize()
+  # Don't include zipalign overhead in normalized size, since it effectively
+  # causes size changes in files that precede aligned files to be rounded.
+  # For APKs where classes.dex directly precedes libchrome.so (the normal
+  # case), this causes small dex size changes to disappear into libchrome.so
+  # alignment.
+  normalized_apk_size -= zipalign_overhead
+  # Don't include the size of the apk's signing block because it can fluctuate
+  # by up to 4kb (from my non-scientific observations), presumably based on hash
+  # sizes.
+  normalized_apk_size -= signing_block_size
+
+  # Unaligned size should be ~= uncompressed size or something is wrong.
+  # As of now, padding_fraction ~= .007
+  padding_fraction = -_PercentageDifference(
+      native_code.ComputeUncompressedSize(), native_code_unaligned_size)
+  # Ignore this check for small / no native code
+  if native_code.ComputeUncompressedSize() > 1000000:
+    assert 0 <= padding_fraction < .02, (
+        'Padding was: {} (file_size={}, sections_sum={})'.format(
+            padding_fraction, native_code.ComputeUncompressedSize(),
+            native_code_unaligned_size))
+
+  if apks_path:
+    # Locale normalization not needed when measuring only one locale.
+    # E.g. a change that adds 300 chars of untranslated strings would cause the
+    # metric to be off by only 390 bytes (assuming a multiplier of 2.3 for
+    # Hindi).
+    pass
+  else:
+    # Avoid noise caused when strings change and translations haven't yet been
+    # updated.
+    num_translations = translations.GetNumEntries()
+    num_stored_translations = stored_translations.GetNumEntries()
+
+    if num_translations > 1:
+      # Multipliers found by looking at MonochromePublic.apk and seeing how much
+      # smaller en-US.pak is relative to the average locale.pak.
+      normalized_apk_size += _NormalizeLanguagePaks(translations, 1.17)
+    if num_stored_translations > 1:
+      normalized_apk_size += _NormalizeLanguagePaks(stored_translations, 1.43)
+    if num_translations + num_stored_translations > 1:
+      if num_translations == 0:
+        # WebView stores all locale paks uncompressed.
+        num_arsc_translations = num_stored_translations
+      else:
+        # Monochrome has more configurations than Chrome since it includes
+        # WebView (which supports more locales), but these should mostly be
+        # empty so ignore them here.
+        num_arsc_translations = num_translations
+      normalized_apk_size += _NormalizeResourcesArsc(apk_path,
+                                                     arsc.GetNumEntries(),
+                                                     num_arsc_translations,
+                                                     out_dir)
+
+  # It will be -Inf for .apk files with multiple .arsc files and no out_dir set.
+  if normalized_apk_size < 0:
+    sys.stderr.write('Skipping normalized_apk_size (no output directory set)\n')
+  else:
+    report_func('Specifics', 'normalized apk size', normalized_apk_size,
+                'bytes')
+  # The "file count" metric cannot be grouped with any other metrics when the
+  # end result is going to be uploaded to the perf dashboard in the HistogramSet
+  # format due to mixed units (bytes vs. zip entries) causing malformed
+  # summaries to be generated.
+  # TODO(https://crbug.com/903970): Remove this workaround if unit mixing is
+  # ever supported.
+  report_func('FileCount', 'file count', len(apk_contents), 'zip entries')
+
+  for info in unknown.AllEntries():
+    sys.stderr.write(
+        'Unknown entry: %s %d\n' % (info.filename, info.compress_size))
+  return normalized_apk_size
+
+
+def _CalculateCompressedSize(file_path):
+  CHUNK_SIZE = 256 * 1024
+  compressor = zlib.compressobj()
+  total_size = 0
+  with open(file_path, 'rb') as f:
+    for chunk in iter(lambda: f.read(CHUNK_SIZE), ''):
+      total_size += len(compressor.compress(chunk))
+  total_size += len(compressor.flush())
+  return total_size
+
+
+@contextmanager
+def Unzip(zip_file, filename=None):
+  """Utility for temporary use of a single file in a zip archive."""
+  with build_utils.TempDir() as unzipped_dir:
+    unzipped_files = build_utils.ExtractAll(
+        zip_file, unzipped_dir, True, pattern=filename)
+    if len(unzipped_files) == 0:
+      raise Exception(
+          '%s not found in %s' % (filename, zip_file))
+    yield unzipped_files[0]
+
+
+def _ConfigOutDirAndToolsPrefix(out_dir):
+  if out_dir:
+    constants.SetOutputDirectory(out_dir)
+  else:
+    try:
+      # Triggers auto-detection when CWD == output directory.
+      constants.CheckOutputDirectory()
+      out_dir = constants.GetOutDirectory()
+    except Exception:  # pylint: disable=broad-except
+      return out_dir, ''
+  build_vars = gn_helpers.ReadBuildVars(out_dir)
+  tool_prefix = os.path.join(out_dir, build_vars['android_tool_prefix'])
+  return out_dir, tool_prefix
+
+
+def _IterSplits(namelist):
+  for subpath in namelist:
+    # Looks for paths like splits/vr-master.apk, splits/vr-hi.apk.
+    name_parts = subpath.split('/')
+    if name_parts[0] == 'splits' and len(name_parts) == 2:
+      name_parts = name_parts[1].split('-')
+      if len(name_parts) == 2:
+        split_name, config_name = name_parts
+        if config_name == 'master.apk':
+          yield subpath, split_name
+
+
+def _ExtractToTempFile(zip_obj, subpath, temp_file):
+  temp_file.seek(0)
+  temp_file.truncate()
+  temp_file.write(zip_obj.read(subpath))
+  temp_file.flush()
+
+
+def _AnalyzeApkOrApks(report_func, apk_path, args):
+  # Create DexStatsCollector here to track unique methods across base & chrome
+  # modules.
+  dex_stats_collector = method_count.DexStatsCollector()
+  out_dir, tool_prefix = _ConfigOutDirAndToolsPrefix(args.out_dir)
+
+  if apk_path.endswith('.apk'):
+    sdk_version, _, _ = _ParseManifestAttributes(apk_path)
+    _AnalyzeInternal(apk_path, sdk_version, report_func, dex_stats_collector,
+                     out_dir, tool_prefix)
+  elif apk_path.endswith('.apks'):
+    with tempfile.NamedTemporaryFile(suffix='.apk') as f:
+      with zipfile.ZipFile(apk_path) as z:
+        # Currently bundletool is creating two apks when .apks is created
+        # without specifying an sdkVersion. Always measure the one with an
+        # uncompressed shared library.
+        try:
+          info = z.getinfo('splits/base-master_2.apk')
+        except KeyError:
+          info = z.getinfo('splits/base-master.apk')
+        _ExtractToTempFile(z, info.filename, f)
+        sdk_version, _, _ = _ParseManifestAttributes(f.name)
+
+        orig_report_func = report_func
+        report_func = _AccumulatingReporter()
+
+        def do_measure(split_name, on_demand):
+          logging.info('Measuring %s on_demand=%s', split_name, on_demand)
+          # Use no-op reporting functions to get normalized size for DFMs.
+          inner_report_func = report_func
+          inner_dex_stats_collector = dex_stats_collector
+          if on_demand:
+            inner_report_func = lambda *_: None
+            inner_dex_stats_collector = method_count.DexStatsCollector()
+
+          size = _AnalyzeInternal(f.name,
+                                  sdk_version,
+                                  inner_report_func,
+                                  inner_dex_stats_collector,
+                                  out_dir,
+                                  tool_prefix,
+                                  apks_path=apk_path,
+                                  split_name=split_name)
+          report_func('DFM_' + split_name, 'Size with hindi', size, 'bytes')
+
+        # Measure base outside of the loop since we've already extracted it.
+        do_measure('base', on_demand=False)
+
+        for subpath, split_name in _IterSplits(z.namelist()):
+          if split_name != 'base':
+            _ExtractToTempFile(z, subpath, f)
+            _, _, on_demand = _ParseManifestAttributes(f.name)
+            do_measure(split_name, on_demand=on_demand)
+
+        report_func.DumpReports(orig_report_func)
+        report_func = orig_report_func
+  else:
+    raise Exception('Unknown file type: ' + apk_path)
+
+  # Report dex stats outside of _AnalyzeInternal() so that the "unique methods"
+  # metric is not just the sum of the base and chrome modules.
+  for metric, count in dex_stats_collector.GetTotalCounts().items():
+    report_func('Dex', metric, count, 'entries')
+  report_func('Dex', 'unique methods',
+              dex_stats_collector.GetUniqueMethodCount(), 'entries')
+  report_func('DexCache', 'DexCache',
+              dex_stats_collector.GetDexCacheSize(pre_oreo=sdk_version < 26),
+              'bytes')
+
+  return dex_stats_collector
+
+
+def _ResourceSizes(args):
+  chartjson = _BASE_CHART.copy() if args.output_format else None
+  reporter = _ChartJsonReporter(chartjson)
+  # Create DexStatsCollector here to track unique methods across trichrome APKs.
+  dex_stats_collector = method_count.DexStatsCollector()
+
+  specs = [
+      ('Chrome_', args.trichrome_chrome),
+      ('WebView_', args.trichrome_webview),
+      ('Library_', args.trichrome_library),
+  ]
+  for prefix, path in specs:
+    if path:
+      reporter.trace_title_prefix = prefix
+      child_dex_stats_collector = _AnalyzeApkOrApks(reporter, path, args)
+      dex_stats_collector.MergeFrom(prefix, child_dex_stats_collector)
+
+  if any(path for _, path in specs):
+    reporter.SynthesizeTotals(dex_stats_collector.GetUniqueMethodCount())
+  else:
+    _AnalyzeApkOrApks(reporter, args.input, args)
+
+  if chartjson:
+    _DumpChartJson(args, chartjson)
+
+
+def _DumpChartJson(args, chartjson):
+  if args.output_file == '-':
+    json_file = sys.stdout
+  elif args.output_file:
+    json_file = open(args.output_file, 'w')
+  else:
+    results_path = os.path.join(args.output_dir, 'results-chart.json')
+    logging.critical('Dumping chartjson to %s', results_path)
+    json_file = open(results_path, 'w')
+
+  json.dump(chartjson, json_file, indent=2)
+
+  if json_file is not sys.stdout:
+    json_file.close()
+
+  # We would ideally generate a histogram set directly instead of generating
+  # chartjson then converting. However, perf_tests_results_helper is in
+  # //build, which doesn't seem to have any precedent for depending on
+  # anything in Catapult. This can probably be fixed, but since this doesn't
+  # need to be super fast or anything, converting is a good enough solution
+  # for the time being.
+  if args.output_format == 'histograms':
+    histogram_result = convert_chart_json.ConvertChartJson(results_path)
+    if histogram_result.returncode != 0:
+      raise Exception('chartjson conversion failed with error: ' +
+                      histogram_result.stdout)
+
+    histogram_path = os.path.join(args.output_dir, 'perf_results.json')
+    logging.critical('Dumping histograms to %s', histogram_path)
+    with open(histogram_path, 'w') as json_file:
+      json_file.write(histogram_result.stdout)
+
+
+def main():
+  argparser = argparse.ArgumentParser(description='Print APK size metrics.')
+  argparser.add_argument(
+      '--min-pak-resource-size',
+      type=int,
+      default=20 * 1024,
+      help='Minimum byte size of displayed pak resources.')
+  argparser.add_argument(
+      '--chromium-output-directory',
+      dest='out_dir',
+      type=os.path.realpath,
+      help='Location of the build artifacts.')
+  argparser.add_argument(
+      '--chartjson',
+      action='store_true',
+      help='DEPRECATED. Use --output-format=chartjson '
+      'instead.')
+  argparser.add_argument(
+      '--output-format',
+      choices=['chartjson', 'histograms'],
+      help='Output the results to a file in the given '
+      'format instead of printing the results.')
+  argparser.add_argument('--loadable_module', help='Obsolete (ignored).')
+
+  # Accepted to conform to the isolated script interface, but ignored.
+  argparser.add_argument(
+      '--isolated-script-test-filter', help=argparse.SUPPRESS)
+  argparser.add_argument(
+      '--isolated-script-test-perf-output',
+      type=os.path.realpath,
+      help=argparse.SUPPRESS)
+
+  output_group = argparser.add_mutually_exclusive_group()
+
+  output_group.add_argument(
+      '--output-dir', default='.', help='Directory to save chartjson to.')
+  output_group.add_argument(
+      '--output-file',
+      help='Path to output .json (replaces --output-dir). Works only for '
+      '--output-format=chartjson')
+  output_group.add_argument(
+      '--isolated-script-test-output',
+      type=os.path.realpath,
+      help='File to which results will be written in the '
+      'simplified JSON output format.')
+
+  argparser.add_argument('input', help='Path to .apk or .apks file to measure.')
+  trichrome_group = argparser.add_argument_group(
+      'Trichrome inputs',
+      description='When specified, |input| is used only as test suite name.')
+  trichrome_group.add_argument(
+      '--trichrome-chrome', help='Path to Trichrome Chrome .apks')
+  trichrome_group.add_argument(
+      '--trichrome-webview', help='Path to Trichrome WebView .apk(s)')
+  trichrome_group.add_argument(
+      '--trichrome-library', help='Path to Trichrome Library .apk')
+  args = argparser.parse_args()
+
+  devil_chromium.Initialize(output_directory=args.out_dir)
+
+  # TODO(bsheedy): Remove this once uses of --chartjson have been removed.
+  if args.chartjson:
+    args.output_format = 'chartjson'
+
+  isolated_script_output = {'valid': False, 'failures': []}
+
+  test_name = 'resource_sizes (%s)' % os.path.basename(args.input)
+
+  if args.isolated_script_test_output:
+    args.output_dir = os.path.join(
+        os.path.dirname(args.isolated_script_test_output), test_name)
+    if not os.path.exists(args.output_dir):
+      os.makedirs(args.output_dir)
+
+  try:
+    _ResourceSizes(args)
+    isolated_script_output = {
+        'valid': True,
+        'failures': [],
+    }
+  finally:
+    if args.isolated_script_test_output:
+      results_path = os.path.join(args.output_dir, 'test_results.json')
+      with open(results_path, 'w') as output_file:
+        json.dump(isolated_script_output, output_file)
+      with open(args.isolated_script_test_output, 'w') as output_file:
+        json.dump(isolated_script_output, output_file)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/resource_sizes.pydeps b/src/build/android/resource_sizes.pydeps
new file mode 100644
index 0000000..d956f5b
--- /dev/null
+++ b/src/build/android/resource_sizes.pydeps
@@ -0,0 +1,58 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/resource_sizes.pydeps build/android/resource_sizes.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/six/six.py
+../../third_party/catapult/third_party/vinn/vinn/__init__.py
+../../third_party/catapult/third_party/vinn/vinn/_vinn.py
+../../third_party/catapult/tracing/tracing/__init__.py
+../../third_party/catapult/tracing/tracing/value/__init__.py
+../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
+../../third_party/catapult/tracing/tracing_project.py
+../gn_helpers.py
+../util/lib/common/perf_result_data_type.py
+../util/lib/common/perf_tests_results_helper.py
+devil_chromium.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/zipalign.py
+method_count.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/dex/__init__.py
+pylib/dex/dex_parser.py
+resource_sizes.py
diff --git a/src/build/android/screenshot.py b/src/build/android/screenshot.py
new file mode 100755
index 0000000..523d859
--- /dev/null
+++ b/src/build/android/screenshot.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import screenshot
+
+if __name__ == '__main__':
+  devil_chromium.Initialize()
+  sys.exit(screenshot.main())
diff --git a/src/build/android/stacktrace/BUILD.gn b/src/build/android/stacktrace/BUILD.gn
new file mode 100644
index 0000000..ce13a15
--- /dev/null
+++ b/src/build/android/stacktrace/BUILD.gn
@@ -0,0 +1,28 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+java_library("java_deobfuscate_java") {
+  sources = [ "java/org/chromium/build/FlushingReTrace.java" ]
+
+  # Avoid using java_prebuilt() to ensure all uses go through the checked-in
+  # wrapper script.
+  input_jars_paths = [
+    "//third_party/proguard/lib/proguard603.jar",
+    "//third_party/proguard/lib/retrace603.jar",
+  ]
+}
+
+# Use the checked-in copy of the wrapper script & .jar rather than the built
+# one to simplify usage of the tool.
+group("java_deobfuscate") {
+  data = [
+    "java_deobfuscate.py",
+    "java_deobfuscate.jar",
+    "//third_party/proguard/lib/proguard603.jar",
+    "//third_party/proguard/lib/retrace603.jar",
+  ]
+  deps = [ "//third_party/jdk:java_data" ]
+}
diff --git a/src/build/android/stacktrace/README.md b/src/build/android/stacktrace/README.md
new file mode 100644
index 0000000..58ea94b
--- /dev/null
+++ b/src/build/android/stacktrace/README.md
@@ -0,0 +1,36 @@
+# java_deobfuscate.py
+
+A wrapper around ProGuard's ReTrace tool, which:
+
+1) Updates the regular expression used to identify stack lines, and
+2) Streams its output.
+
+The second point here is what allows you to run:
+
+    adb logcat | build/android/stacktrace/java_deobfuscate.py out/Default/apks/ChromePublic.apk.mapping
+
+And still see output as it is produced, rather than only after logcat exits.
+
+
+## Update Instructions:
+
+    ninja -C out/Release java_deobfuscate
+    cp out/Release/lib.java/build/android/stacktrace/java_deobfuscate.jar build/android/stacktrace
+
+# stackwalker.py
+
+Extracts Breakpad microdumps from a log file and uses `stackwalker` to symbolize
+them.
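+
+A typical invocation looks like this (paths are illustrative; see the
+script's --help for the full flag list):
+
+    build/android/stacktrace/stackwalker.py \
+        --stackwalker-binary-path out/Default/minidump_stackwalk \
+        --stack-trace-path /tmp/logcat.txt \
+        --symbols-path out/Default/breakpad_syms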
+
+
+# crashpad_stackwalker.py
+
+Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
diff --git a/src/build/android/stacktrace/crashpad_stackwalker.py b/src/build/android/stacktrace/crashpad_stackwalker.py
new file mode 100755
index 0000000..9616a54
--- /dev/null
+++ b/src/build/android/stacktrace/crashpad_stackwalker.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
+# All the non-trivial operations are performed by generate_breakpad_symbols.py,
+# dump_syms, minidump_dump and minidump_stackwalk.
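+#
+# Example invocation (serial number and paths are illustrative):
+#   build/android/stacktrace/crashpad_stackwalker.py \
+#       --device 0123456789ABCDEF \
+#       --build-path out/Default \
+#       --chrome-cache-path /data/data/com.android.chrome/cache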
+
+import argparse
+import logging
+import os
+import posixpath
+import re
+import sys
+import shutil
+import subprocess
+import tempfile
+
+_BUILD_ANDROID_PATH = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(_BUILD_ANDROID_PATH)
+import devil_chromium
+from devil.android import device_utils
+from devil.utils import timeout_retry
+
+
+def _CreateSymbolsDir(build_path, dynamic_library_names):
+  generator = os.path.normpath(
+      os.path.join(_BUILD_ANDROID_PATH, '..', '..', 'components', 'crash',
+                   'content', 'tools', 'generate_breakpad_symbols.py'))
+  syms_dir = os.path.join(build_path, 'crashpad_syms')
+  shutil.rmtree(syms_dir, ignore_errors=True)
+  os.mkdir(syms_dir)
+  for lib in dynamic_library_names:
+    unstripped_library_path = os.path.join(build_path, 'lib.unstripped', lib)
+    if not os.path.exists(unstripped_library_path):
+      continue
+    logging.info('Generating symbols for: %s', unstripped_library_path)
+    cmd = [
+        generator,
+        '--symbols-dir',
+        syms_dir,
+        '--build-dir',
+        build_path,
+        '--binary',
+        unstripped_library_path,
+        '--platform',
+        'android',
+    ]
+    return_code = subprocess.call(cmd)
+    if return_code != 0:
+      logging.error('Could not extract symbols, command failed: %s',
+                    ' '.join(cmd))
+  return syms_dir
+
+
+def _ChooseLatestCrashpadDump(device, crashpad_dump_path):
+  if not device.PathExists(crashpad_dump_path):
+    logging.warning('Crashpad dump directory does not exist: %s',
+                    crashpad_dump_path)
+    return None
+  latest = None
+  latest_timestamp = 0
+  for crashpad_file in device.ListDirectory(crashpad_dump_path):
+    if crashpad_file.endswith('.dmp'):
+      stat = device.StatPath(posixpath.join(crashpad_dump_path, crashpad_file))
+      current_timestamp = stat['st_mtime']
+      if current_timestamp > latest_timestamp:
+        latest_timestamp = current_timestamp
+        latest = crashpad_file
+  return latest
+
+
+def _ExtractLibraryNamesFromDump(build_path, dump_path):
+  default_library_name = 'libmonochrome.so'
+  dumper_path = os.path.join(build_path, 'minidump_dump')
+  if not os.access(dumper_path, os.X_OK):
+    logging.warning(
+        'Cannot extract library name from dump because %s is not found, '
+        'default to: %s', dumper_path, default_library_name)
+    return [default_library_name]
+  p = subprocess.Popen([dumper_path, dump_path],
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE)
+  stdout, stderr = p.communicate()
+  if p.returncode != 0:
+    # Dumper errors often do not affect stack walkability, just a warning.
+    logging.warning('Reading minidump failed with output:\n%s', stderr)
+
+  library_names = []
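+  # minidump_dump prints module records like the following (illustrative):
+  #   MDRawModule
+  #     (code_file)                     = "libmonochrome.so"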
+  module_library_line_re = re.compile(r'[(]code_file[)]\s+= '
+                                      r'"(?P<library_name>lib[^. ]+.so)"')
+  in_module = False
+  for line in stdout.splitlines():
+    line = line.lstrip().rstrip('\n')
+    if line == 'MDRawModule':
+      in_module = True
+      continue
+    if line == '':
+      in_module = False
+      continue
+    if in_module:
+      m = module_library_line_re.match(line)
+      if m:
+        library_names.append(m.group('library_name'))
+  if not library_names:
+    logging.warning(
+        'Could not find any library name in the dump, '
+        'default to: %s', default_library_name)
+    return [default_library_name]
+  return library_names
+
+
+def main():
+  logging.basicConfig(level=logging.INFO)
+  parser = argparse.ArgumentParser(
+      description='Fetches Crashpad dumps from a given device, '
+      'walks and symbolizes the stacks.')
+  parser.add_argument('--device', required=True, help='Device serial number')
+  parser.add_argument('--adb-path', help='Path to the "adb" command')
+  parser.add_argument(
+      '--build-path',
+      required=True,
+      help='Build output directory, equivalent to CHROMIUM_OUTPUT_DIR')
+  parser.add_argument(
+      '--chrome-cache-path',
+      required=True,
+      help='Directory on the device where Chrome stores cached files; '
+      'Crashpad stores dumps in a subdirectory of it.')
+  args = parser.parse_args()
+
+  stackwalk_path = os.path.join(args.build_path, 'minidump_stackwalk')
+  if not os.path.exists(stackwalk_path):
+    logging.error('Missing minidump_stackwalk executable')
+    return 1
+
+  devil_chromium.Initialize(output_directory=args.build_path,
+                            adb_path=args.adb_path)
+  device = device_utils.DeviceUtils(args.device)
+
+  device_crashpad_path = posixpath.join(args.chrome_cache_path, 'Crashpad',
+                                        'pending')
+
+  def CrashpadDumpExists():
+    return _ChooseLatestCrashpadDump(device, device_crashpad_path)
+
+  crashpad_file = timeout_retry.WaitFor(
+      CrashpadDumpExists, wait_period=1, max_tries=9)
+  if not crashpad_file:
+    logging.error('Could not locate a crashpad dump')
+    return 1
+
+  dump_dir = tempfile.mkdtemp()
+  symbols_dir = None
+  try:
+    device.PullFile(
+        device_path=posixpath.join(device_crashpad_path, crashpad_file),
+        host_path=dump_dir)
+    dump_full_path = os.path.join(dump_dir, crashpad_file)
+    library_names = _ExtractLibraryNamesFromDump(args.build_path,
+                                                 dump_full_path)
+    symbols_dir = _CreateSymbolsDir(args.build_path, library_names)
+    stackwalk_cmd = [stackwalk_path, dump_full_path, symbols_dir]
+    subprocess.call(stackwalk_cmd)
+  finally:
+    shutil.rmtree(dump_dir, ignore_errors=True)
+    if symbols_dir:
+      shutil.rmtree(symbols_dir, ignore_errors=True)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java b/src/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
new file mode 100644
index 0000000..baa9313
--- /dev/null
+++ b/src/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
@@ -0,0 +1,120 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+
+import proguard.retrace.ReTrace;
+
+/**
+ * A wrapper around ReTrace that:
+ *  1. Hardcodes a more useful line regular expression
+ *  2. Disables output buffering
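+ *
+ * Example, using the test mapping from java_deobfuscate_test.py:
+ *   input:  "\tat FOO.bar(PG:1)"
+ *   output: "\tat this.was.Deobfuscated.someMethod(Deobfuscated.java:65)"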
+ */
+public class FlushingReTrace {
+    // E.g.: D/ConnectivityService(18029): Message
+    // E.g.: W/GCM     ( 151): Message
+    // E.g.: 09-08 14:22:59.995 18029 18055 I ProcessStatsService: Message
+    // E.g.: 09-08 14:30:59.145 17731 18020 D MDnsDS  : Message
+    private static final String LOGCAT_PREFIX =
+            "(?:[VDIWEF]/.*?\\( *\\d+\\): |\\d\\d-\\d\\d [0-9:. ]+[VDIWEF] .*?: )?";
+
+    // Note: Order of these sub-patterns defines their precedence.
+    // Note: Deobfuscation of methods without the presence of line numbers basically never works.
+    // There are tests for these patterns at //build/android/stacktrace/java_deobfuscate_test.py.
+    private static final String LINE_PARSE_REGEX =
+            // Eagerly match logcat prefix to avoid conflicting with the patterns below.
+            LOGCAT_PREFIX
+            + "(?:"
+            // Based on the default ReTrace regex, but with whitespace allowed in the file:line
+            // parentheses, and the "at" token relaxed to also match a bare ":" separator:
+            // E.g.: 06-22 13:58:02.895  4674  4674 E THREAD_STATE:     bLA.a( PG : 173 )
+            // Normal stack trace lines look like:
+            // \tat org.chromium.chrome.browser.tab.Tab.handleJavaCrash(Tab.java:682)
+            + "(?:.*?(?::|\\bat)\\s+%c\\.%m\\s*\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\))|"
+            // E.g.: Caused by: java.lang.NullPointerException: Attempt to read from field 'int bLA'
+            // on a null object reference
+            + "(?:.*java\\.lang\\.NullPointerException.*[\"']%t\\s*%c\\.(?:%f|%m\\(%a\\))[\"'].*)|"
+            // E.g.: java.lang.VerifyError: bLA
+            + "(?:java\\.lang\\.VerifyError: %c)|"
+            // E.g.: java.lang.NoSuchFieldError: No instance field e of type L...; in class LbxK;
+            + "(?:java\\.lang\\.NoSuchFieldError: No instance field %f of type .*? in class L%C;)|"
+            // E.g.: Object of type Clazz was not destroyed... (See LifetimeAssert.java)
+            + "(?:.*?Object of type %c .*)|"
+            // E.g.: VFY: unable to resolve new-instance 3810 (LSome/Framework/Class;) in Lfoo/Bar;
+            + "(?:.*L%C;.*)|"
+            // E.g.: END SomeTestClass#someMethod
+            + "(?:.*?%c#%m.*?)|"
+            // Special-case for a common junit logcat message:
+            // E.g.: java.lang.NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+            + "(?:.* isTestClass for %c)|"
+            // E.g.: Caused by: java.lang.RuntimeException: Intentional Java Crash
+            + "(?:Caused by: %c:.*)|"
+            // Quoted values and lines that end with a class / class+method:
+            // E.g.: The class: Foo
+            // E.g.: INSTRUMENTATION_STATUS: class=Foo
+            // E.g.: NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+            // E.g.: Could not find class 'SomeFrameworkClass', referenced from method Foo.bar
+            // E.g.: Could not find method SomeFrameworkMethod, referenced from method Foo.bar
+            // E.g.: The member "Foo.bar"
+            // E.g.: The class "Foobar"
+            // Be careful about matching %c without %m since language tags look like class names.
+            + "(?:.*?%c\\.%m)|"
+            + "(?:.*?\"%c\\.%m\".*)|"
+            + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?\"%c\".*)|"
+            + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?%c)|"
+            // E.g.: java.lang.RuntimeException: Intentional Java Crash
+            + "(?:%c:.*)|"
+            // See if entire line matches a class name (e.g. for manual deobfuscation)
+            + "(?:%c)"
+            + ")";
+
+    private static void usage() {
+        System.err.println("Usage: echo $OBFUSCATED_CLASS | java_deobfuscate Foo.apk.mapping");
+        System.err.println("Usage: java_deobfuscate Foo.apk.mapping < foo.log");
+        System.err.println("Note: Deobfuscation of symbols outside the context of stack "
+                + "traces will work only when lines match the regular expression defined "
+                + "in FlushingReTrace.java.");
+        System.err.println("Also: Deobfuscation of method names without associated line "
+                + "numbers does not seem to work.");
+        System.exit(1);
+    }
+
+    public static void main(String[] args) {
+        if (args.length != 1 || args[0].startsWith("-")) {
+            usage();
+        }
+
+        File mappingFile = new File(args[0]);
+        try {
+            LineNumberReader reader = new LineNumberReader(
+                    new BufferedReader(new InputStreamReader(System.in, "UTF-8")));
+
+            // Enabling autoFlush is the main difference from ReTrace.main().
+            boolean autoFlush = true;
+            PrintWriter writer =
+                    new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"), autoFlush);
+
+            boolean verbose = false;
+            new ReTrace(LINE_PARSE_REGEX, verbose, mappingFile).retrace(reader, writer);
+        } catch (IOException ex) {
+            // Print a verbose stack trace.
+            ex.printStackTrace();
+            System.exit(1);
+        }
+
+        System.exit(0);
+    }
+}
diff --git a/src/build/android/stacktrace/java_deobfuscate.jar b/src/build/android/stacktrace/java_deobfuscate.jar
new file mode 100644
index 0000000..36a1b70
--- /dev/null
+++ b/src/build/android/stacktrace/java_deobfuscate.jar
Binary files differ
diff --git a/src/build/android/stacktrace/java_deobfuscate.py b/src/build/android/stacktrace/java_deobfuscate.py
new file mode 100755
index 0000000..8c231ec
--- /dev/null
+++ b/src/build/android/stacktrace/java_deobfuscate.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper script for java_deobfuscate.
+
+This is also a buildable target, but having it pre-built here simplifies usage.
+"""
+
+import os
+import sys
+
+DIR_SOURCE_ROOT = os.path.normpath(
+    os.path.join(os.path.dirname(__file__), '../../../'))
+
+
+def main():
+  classpath = [
+      os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'stacktrace',
+                   'java_deobfuscate.jar'),
+      os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard', 'lib',
+                   'proguard603.jar'),
+      os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard', 'lib',
+                   'retrace603.jar'),
+  ]
+  java_path = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
+                           'bin', 'java')
+
+  cmd = [
+      java_path, '-classpath', ':'.join(classpath),
+      'org.chromium.build.FlushingReTrace'
+  ]
+  cmd.extend(sys.argv[1:])
+  os.execvp(cmd[0], cmd)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/stacktrace/java_deobfuscate_test.py b/src/build/android/stacktrace/java_deobfuscate_test.py
new file mode 100755
index 0000000..1bf81c9
--- /dev/null
+++ b/src/build/android/stacktrace/java_deobfuscate_test.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for java_deobfuscate."""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import unittest
+
+# Set by command-line argument.
+_JAVA_DEOBFUSCATE_PATH = None
+
+LINE_PREFIXES = [
+    '',
+    # logcat -v threadtime
+    '09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ',
+    # logcat
+    'W/GCM     (15158): ',
+    'W/GCM     (  158): ',
+]
+
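+# A minimal ProGuard-style mapping: 'original.Name -> obfuscatedname:' starts a
+# class block, followed by indented member mappings. For methods, the leading
+# and trailing number pairs are the obfuscated and original line ranges.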
+TEST_MAP = """\
+this.was.Deobfuscated -> FOO:
+    int[] mFontFamily -> a
+    1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+never.Deobfuscated -> NOTFOO:
+    int[] mFontFamily -> a
+    1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+"""
+
+TEST_DATA = [
+    '',
+    'FOO',
+    'FOO.bar',
+    'Here is a FOO',
+    'Here is a class FOO',
+    'Here is a class FOO baz',
+    'Here is a "FOO" baz',
+    'Here is a type "FOO" baz',
+    'Here is a "FOO.bar" baz',
+    'SomeError: SomeFrameworkClass in isTestClass for FOO',
+    'Here is a FOO.bar',
+    'Here is a FOO.bar baz',
+    'END FOO#bar',
+    'new-instance 3810 (LSome/Framework/Class;) in LFOO;',
+    'FOO: Error message',
+    'Caused by: FOO: Error message',
+    '\tat FOO.bar(PG:1)',
+    '\t at\t FOO.bar\t (\t PG:\t 1\t )',
+    ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+     ' java.lang.NullPointerException: Attempt to invoke interface method'
+     ' \'void FOO.bar(int,android.os.Bundle)\' on a null object reference'),
+    ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+     ' \'int[] FOO.a\' on a null object reference'),
+    'java.lang.VerifyError: FOO',
+    ('java.lang.NoSuchFieldError: No instance field a of type '
+     'Ljava/lang/Class; in class LFOO;'),
+    'NOTFOO: Object of type FOO was not destroyed...',
+]
+
+EXPECTED_OUTPUT = [
+    '',
+    'this.was.Deobfuscated',
+    'this.was.Deobfuscated.someMethod',
+    'Here is a FOO',
+    'Here is a class this.was.Deobfuscated',
+    'Here is a class FOO baz',
+    'Here is a "FOO" baz',
+    'Here is a type "this.was.Deobfuscated" baz',
+    'Here is a "this.was.Deobfuscated.someMethod" baz',
+    'SomeError: SomeFrameworkClass in isTestClass for this.was.Deobfuscated',
+    'Here is a this.was.Deobfuscated.someMethod',
+    'Here is a FOO.bar baz',
+    'END this.was.Deobfuscated#someMethod',
+    'new-instance 3810 (LSome/Framework/Class;) in Lthis/was/Deobfuscated;',
+    'this.was.Deobfuscated: Error message',
+    'Caused by: this.was.Deobfuscated: Error message',
+    '\tat this.was.Deobfuscated.someMethod(Deobfuscated.java:65)',
+    ('\t at\t this.was.Deobfuscated.someMethod\t '
+     '(\t Deobfuscated.java:\t 65\t )'),
+    ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+     ' java.lang.NullPointerException: Attempt to invoke interface method'
+     ' \'void this.was.Deobfuscated.someMethod(int,android.os.Bundle)\' on a'
+     ' null object reference'),
+    ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+     ' \'int[] this.was.Deobfuscated.mFontFamily\' on a null object reference'),
+    'java.lang.VerifyError: this.was.Deobfuscated',
+    ('java.lang.NoSuchFieldError: No instance field mFontFamily of type '
+     'Ljava/lang/Class; in class Lthis/was/Deobfuscated;'),
+    'NOTFOO: Object of type this.was.Deobfuscated was not destroyed...',
+]
+TEST_DATA = [s + '\n' for s in TEST_DATA]
+EXPECTED_OUTPUT = [s + '\n' for s in EXPECTED_OUTPUT]
+
+
+class JavaDeobfuscateTest(unittest.TestCase):
+
+  def __init__(self, *args, **kwargs):
+    super(JavaDeobfuscateTest, self).__init__(*args, **kwargs)
+    self._map_file = None
+
+  def setUp(self):
+    self._map_file = tempfile.NamedTemporaryFile()
+    self._map_file.write(TEST_MAP)
+    self._map_file.flush()
+
+  def tearDown(self):
+    if self._map_file:
+      self._map_file.close()
+
+  def _testImpl(self, input_lines=None, expected_output_lines=None,
+                prefix=''):
+    self.assertTrue(bool(input_lines) == bool(expected_output_lines))
+
+    if not input_lines:
+      input_lines = [prefix + x for x in TEST_DATA]
+    if not expected_output_lines:
+      expected_output_lines = [prefix + x for x in EXPECTED_OUTPUT]
+
+    cmd = [_JAVA_DEOBFUSCATE_PATH, self._map_file.name]
+    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+    proc_output, _ = proc.communicate(''.join(input_lines))
+    actual_output_lines = proc_output.splitlines(True)
+    for actual, expected in zip(actual_output_lines, expected_output_lines):
+      self.assertTrue(
+          actual == expected or actual.replace('bar', 'someMethod') == expected,
+          msg=''.join([
+              'Deobfuscation failed.\n',
+              '  actual:   %s' % actual,
+              '  expected: %s' % expected]))
+
+  def testNoPrefix(self):
+    self._testImpl(prefix='')
+
+  def testThreadtimePrefix(self):
+    self._testImpl(prefix='09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ')
+
+  def testStandardPrefix(self):
+    self._testImpl(prefix='W/GCM     (15158): ')
+
+  def testStandardPrefixWithPadding(self):
+    self._testImpl(prefix='W/GCM     (  158): ')
+
+  @unittest.skip('causes java_deobfuscate to hang, see crbug.com/876539')
+  def testIndefiniteHang(self):
+    # Test for crbug.com/876539.
+    self._testImpl(
+        input_lines=[
+            'VFY: unable to resolve virtual method 2: LFOO;'
+                + '.onDescendantInvalidated '
+                + '(Landroid/view/View;Landroid/view/View;)V',
+        ],
+        expected_output_lines=[
+            'VFY: unable to resolve virtual method 2: Lthis.was.Deobfuscated;'
+                + '.onDescendantInvalidated '
+                + '(Landroid/view/View;Landroid/view/View;)V',
+        ])
+
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--java-deobfuscate-path', type=os.path.realpath,
+                      required=True)
+  known_args, unittest_args = parser.parse_known_args()
+  _JAVA_DEOBFUSCATE_PATH = known_args.java_deobfuscate_path
+  unittest_args = [sys.argv[0]] + unittest_args
+  unittest.main(argv=unittest_args)
diff --git a/src/build/android/stacktrace/stackwalker.py b/src/build/android/stacktrace/stackwalker.py
new file mode 100755
index 0000000..4f2782f
--- /dev/null
+++ b/src/build/android/stacktrace/stackwalker.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import sys
+import tempfile
+
+if __name__ == '__main__':
+  sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+
+_MICRODUMP_BEGIN = re.compile(
+    '.*google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----')
+_MICRODUMP_END = re.compile(
+    '.*google-breakpad: -----END BREAKPAD MICRODUMP-----')
+
+""" Example Microdump
+<timestamp>  6270  6131 F google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----
+<timestamp>  6270  6131 F google-breakpad: V Chrome_Android:54.0.2790.0
+...
+<timestamp>  6270  6131 F google-breakpad: -----END BREAKPAD MICRODUMP-----
+
+"""
+
+
+def GetMicroDumps(dump_path):
+  """Returns all microdumps found in given log file
+
+  Args:
+    dump_path: Path to the log file.
+
+  Returns:
+    List of all microdumps as lists of lines.
+  """
+  with open(dump_path, 'r') as d:
+    data = d.read()
+  all_dumps = []
+  current_dump = None
+  for line in data.splitlines():
+    if current_dump is not None:
+      if _MICRODUMP_END.match(line):
+        current_dump.append(line)
+        all_dumps.append(current_dump)
+        current_dump = None
+      else:
+        current_dump.append(line)
+    elif _MICRODUMP_BEGIN.match(line):
+      current_dump = []
+      current_dump.append(line)
+  return all_dumps
+
+
+def SymbolizeMicroDump(stackwalker_binary_path, dump, symbols_path):
+  """Runs stackwalker on microdump.
+
+  Runs the stackwalker binary at stackwalker_binary_path on a given microdump
+  using the symbols at symbols_path.
+
+  Args:
+    stackwalker_binary_path: Path to the stackwalker binary.
+    dump: The microdump to run the stackwalker on.
+    symbols_path: Path to the symbols file to use.
+
+  Returns:
+    Output from stackwalker tool.
+  """
+  with tempfile.NamedTemporaryFile() as tf:
+    for l in dump:
+      tf.write('%s\n' % l)
+    cmd = [stackwalker_binary_path, tf.name, symbols_path]
+    return cmd_helper.GetCmdOutput(cmd)
+
+
+def AddArguments(parser):
+  parser.add_argument('--stackwalker-binary-path', required=True,
+                      help='Path to stackwalker binary.')
+  parser.add_argument('--stack-trace-path', required=True,
+                      help='Path to stacktrace containing microdump.')
+  parser.add_argument('--symbols-path', required=True,
+                      help='Path to symbols file.')
+  parser.add_argument('--output-file',
+                      help='Path to dump stacktrace output to')
+
+
+def _PrintAndLog(line, fp):
+  if fp:
+    fp.write('%s\n' % line)
+  print(line)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  AddArguments(parser)
+  args = parser.parse_args()
+
+  micro_dumps = GetMicroDumps(args.stack_trace_path)
+  if not micro_dumps:
+    print('No microdump found. Exiting.')
+    return 0
+
+  symbolized_dumps = []
+  for micro_dump in micro_dumps:
+    symbolized_dumps.append(SymbolizeMicroDump(
+        args.stackwalker_binary_path, micro_dump, args.symbols_path))
+
+  fp = open(args.output_file, 'w') if args.output_file else None
+  try:
+    _PrintAndLog('%d microdumps found.' % len(micro_dumps), fp)
+    _PrintAndLog('---------- Start output from stackwalker ----------', fp)
+    for index, symbolized_dump in enumerate(symbolized_dumps):
+      _PrintAndLog(
+          '------------------ Start dump %d ------------------' % index, fp)
+      _PrintAndLog(symbolized_dump, fp)
+      _PrintAndLog(
+          '------------------- End dump %d -------------------' % index, fp)
+    _PrintAndLog('----------- End output from stackwalker -----------', fp)
+  finally:
+    if fp:
+      fp.close()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/test/BUILD.gn b/src/build/android/test/BUILD.gn
new file mode 100644
index 0000000..d5f8609
--- /dev/null
+++ b/src/build/android/test/BUILD.gn
@@ -0,0 +1,30 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/android_nocompile.gni")
+import("nocompile_gn/nocompile_sources.gni")
+
+if (enable_java_templates) {
+  android_nocompile_test_suite("android_lint_test") {
+    # Depend on lint Python script so that the action is re-run whenever the lint script is
+    # modified.
+    pydeps = [ "//build/android/gyp/lint.pydeps" ]
+    tests = [
+      {
+        target = "nocompile_gn:default_locale_lint_test"
+        nocompile_sources =
+            rebase_path(default_locale_lint_test_nocompile_sources,
+                        "",
+                        "nocompile_gn")
+        expected_compile_output_regex = "Warning:.*DefaultLocale"
+      },
+      {
+        target = "nocompile_gn:new_api_lint_test"
+        nocompile_sources =
+            rebase_path(new_api_lint_test_nocompile_sources, "", "nocompile_gn")
+        expected_compile_output_regex = "Error:.*NewApi"
+      },
+    ]
+  }
+}
diff --git a/src/build/android/test/nocompile_gn/BUILD.gn b/src/build/android/test/nocompile_gn/BUILD.gn
new file mode 100644
index 0000000..d3262fe
--- /dev/null
+++ b/src/build/android/test/nocompile_gn/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/android_nocompile.gni")
+import("//build/config/android/rules.gni")
+import("nocompile_sources.gni")
+
+template("lint_test") {
+  _library_target_name = "${target_name}_test_java"
+  _apk_target_name = "${target_name}_apk"
+
+  android_library(_library_target_name) {
+    sources = [ "//tools/android/errorprone_plugin/test/src/org/chromium/tools/errorprone/plugin/Empty.java" ]
+    not_needed(invoker, [ "sources" ])
+    if (enable_android_nocompile_tests) {
+      sources += invoker.sources
+    }
+  }
+
+  android_apk(_apk_target_name) {
+    # This cannot be marked testonly since lint has special ignores for testonly
+    # targets. We need to test linting a normal apk target.
+    apk_name = _apk_target_name
+    deps = [ ":$_library_target_name" ]
+    android_manifest = "//build/android/AndroidManifest.xml"
+  }
+
+  android_lint(target_name) {
+    _apk_target = ":${_apk_target_name}"
+    deps = [ "${_apk_target}__java" ]
+    build_config_dep = "$_apk_target$build_config_target_suffix"
+    build_config = get_label_info(_apk_target, "target_gen_dir") + "/" +
+                   get_label_info(_apk_target, "name") + ".build_config"
+    if (enable_android_nocompile_tests) {
+      skip_build_server = true
+    }
+  }
+}
+
+lint_test("default_locale_lint_test") {
+  sources = default_locale_lint_test_nocompile_sources
+}
+
+lint_test("new_api_lint_test") {
+  sources = new_api_lint_test_nocompile_sources
+}
diff --git a/src/build/android/test/nocompile_gn/nocompile_sources.gni b/src/build/android/test/nocompile_gn/nocompile_sources.gni
new file mode 100644
index 0000000..8fc049e
--- /dev/null
+++ b/src/build/android/test/nocompile_gn/nocompile_sources.gni
@@ -0,0 +1,8 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+default_locale_lint_test_nocompile_sources =
+    [ "../../java/test/DefaultLocaleLintTest.java" ]
+
+new_api_lint_test_nocompile_sources = [ "../../java/test/NewApiLintTest.java" ]
diff --git a/src/build/android/test_runner.py b/src/build/android/test_runner.py
new file mode 100755
index 0000000..84010c3
--- /dev/null
+++ b/src/build/android/test_runner.py
@@ -0,0 +1,1182 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all types of tests from one unified interface."""
+
+from __future__ import absolute_import
+import argparse
+import collections
+import contextlib
+import itertools
+import logging
+import os
+import re
+import shutil
+import signal
+import sys
+import tempfile
+import threading
+import traceback
+import unittest
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See http://crbug.com/724524 and https://bugs.python.org/issue7980.
+import _strptime  # pylint: disable=unused-import
+
+# pylint: disable=redefined-builtin
+from six.moves import range  # Needed for python 3 compatibility.
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+
+from devil import base_error
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import environment_factory
+from pylib.base import output_manager
+from pylib.base import output_manager_factory
+from pylib.base import result_sink
+from pylib.base import test_instance_factory
+from pylib.base import test_run_factory
+from pylib.results import json_results
+from pylib.results import report_results
+from pylib.results.presentation import test_results_presentation
+from pylib.utils import local_utils
+from pylib.utils import logdog_helper
+from pylib.utils import logging_utils
+from pylib.utils import test_filter
+
+from py_utils import contextlib_ext
+
+_DEVIL_STATIC_CONFIG_FILE = os.path.abspath(os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'devil_config.json'))
+
+
+def _RealPath(arg):
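+  """Returns the realpath of |arg|, resolving GN-style '//' root paths.
+
+  E.g. (illustrative): '//build/android' -> '<checkout>/build/android'.
+  """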
+  if arg.startswith('//'):
+    arg = os.path.abspath(os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                       arg[2:].replace('/', os.sep)))
+  return os.path.realpath(arg)
+
+
+def AddTestLauncherOptions(parser):
+  """Adds arguments mirroring //base/test/launcher.
+
+  Args:
+    parser: The parser to which arguments should be added.
+  Returns:
+    The given parser.
+  """
+  parser.add_argument(
+      '--test-launcher-retry-limit',
+      '--test_launcher_retry_limit',
+      '--num_retries', '--num-retries',
+      '--isolated-script-test-launcher-retry-limit',
+      dest='num_retries', type=int, default=2,
+      help='Number of retries for a test before '
+           'giving up (default: %(default)s).')
+  parser.add_argument(
+      '--test-launcher-summary-output',
+      '--json-results-file',
+      dest='json_results_file', type=os.path.realpath,
+      help='If set, will dump results in JSON form to the specified file. '
+           'Note that this will also trigger saving per-test logcats to '
+           'logdog.')
+  parser.add_argument(
+      '--test-launcher-shard-index',
+      type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
+      help='Index of the external shard to run.')
+  parser.add_argument(
+      '--test-launcher-total-shards',
+      type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
+      help='Total number of external shards.')
+
+  test_filter.AddFilterOptions(parser)
+
+  return parser
+
+
+def AddCommandLineOptions(parser):
+  """Adds arguments to support passing command-line flags to the device."""
+  parser.add_argument(
+      '--device-flags-file',
+      type=os.path.realpath,
+      help='The relative filepath to a file containing '
+           'command-line flags to set on the device')
+  parser.add_argument(
+      '--use-apk-under-test-flags-file',
+      action='store_true',
+      help='Whether to use the flags file for the apk under test. If set, '
+           "the filename will be looked up in the APK's PackageInfo.")
+  parser.set_defaults(allow_unknown=True)
+  parser.set_defaults(command_line_flags=None)
+
+
+def AddTracingOptions(parser):
+  # TODO(shenghuazhang): Move this into AddCommonOptions once it's supported
+  # for all test types.
+  parser.add_argument(
+      '--trace-output',
+      metavar='FILENAME', type=os.path.realpath,
+      help='Path to save test_runner trace json output to.')
+
+  parser.add_argument(
+      '--trace-all',
+      action='store_true',
+      help='Whether to trace all function calls.')
+
+
+def AddCommonOptions(parser):
+  """Adds all common options to |parser|."""
+
+  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+
+  debug_or_release_group = parser.add_mutually_exclusive_group()
+  debug_or_release_group.add_argument(
+      '--debug',
+      action='store_const', const='Debug', dest='build_type',
+      default=default_build_type,
+      help='If set, run test suites under out/Debug. '
+           'Default is env var BUILDTYPE or Debug.')
+  debug_or_release_group.add_argument(
+      '--release',
+      action='store_const', const='Release', dest='build_type',
+      help='If set, run test suites under out/Release. '
+           'Default is env var BUILDTYPE or Debug.')
+
+  parser.add_argument(
+      '--break-on-failure', '--break_on_failure',
+      dest='break_on_failure', action='store_true',
+      help='Whether to break on failure.')
+
+  # TODO(jbudorick): Remove this once everything has switched to platform
+  # mode.
+  parser.add_argument(
+      '--enable-platform-mode',
+      action='store_true',
+      help='Run the test scripts in platform mode, which '
+           'conceptually separates the test runner from the '
+           '"device" (local or remote, real or emulated) on '
+           'which the tests are running. [experimental]')
+
+  parser.add_argument(
+      '-e', '--environment',
+      default='local', choices=constants.VALID_ENVIRONMENTS,
+      help='Test environment to run in (default: %(default)s).')
+
+  parser.add_argument(
+      '--local-output',
+      action='store_true',
+      help='Whether to archive test output locally and generate '
+           'a local results detail page.')
+
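+  # An argparse Action with nargs=0 behaves like a boolean flag but can set
+  # several namespace fields at once; --fast-local-dev uses it to alias the
+  # flags listed in its help text.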
+  class FastLocalDevAction(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+      namespace.enable_concurrent_adb = True
+      namespace.enable_device_cache = True
+      namespace.extract_test_list_from_filter = True
+      namespace.local_output = True
+      namespace.num_retries = 0
+      namespace.skip_clear_data = True
+
+  parser.add_argument(
+      '--fast-local-dev',
+      type=bool,
+      nargs=0,
+      action=FastLocalDevAction,
+      help='Alias for: --num-retries=0 --enable-device-cache '
+      '--enable-concurrent-adb --skip-clear-data '
+      '--extract-test-list-from-filter --local-output')
+
+  # TODO(jbudorick): Remove this once downstream bots have switched to
+  # api.test_results.
+  parser.add_argument(
+      '--flakiness-dashboard-server',
+      dest='flakiness_dashboard_server',
+      help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--gs-results-bucket',
+      help='Google Storage bucket to upload results to.')
+
+  parser.add_argument(
+      '--output-directory',
+      dest='output_directory', type=os.path.realpath,
+      help='Path to the directory in which build files are'
+           ' located (must include build type). This will take'
+           ' precedence over --debug and --release')
+  parser.add_argument(
+      '-v', '--verbose',
+      dest='verbose_count', default=0, action='count',
+      help='Verbose level (multiple times for more)')
+
+  parser.add_argument(
+      '--repeat', '--gtest_repeat', '--gtest-repeat',
+      '--isolated-script-test-repeat',
+      dest='repeat', type=int, default=0,
+      help='Number of times to repeat the specified set of tests.')
+
+  # This is currently only implemented for gtests and instrumentation tests.
+  parser.add_argument(
+      '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
+      '--isolated-script-test-also-run-disabled-tests',
+      dest='run_disabled', action='store_true',
+      help='Also run disabled tests if applicable.')
+
+  # These are currently only implemented for gtests.
+  parser.add_argument('--isolated-script-test-output',
+                      help='If present, store test results on this path.')
+  parser.add_argument('--isolated-script-test-perf-output',
+                      help='If present, store chartjson results on this path.')
+
+  AddTestLauncherOptions(parser)
+
+
+def ProcessCommonOptions(args):
+  """Processes and handles all common options."""
+  run_tests_helper.SetLogLevel(args.verbose_count, add_handler=False)
+  # pylint: disable=redefined-variable-type
+  if args.verbose_count > 0:
+    handler = logging_utils.ColorStreamHandler()
+  else:
+    handler = logging.StreamHandler(sys.stdout)
+  # pylint: enable=redefined-variable-type
+  handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(handler)
+
+  constants.SetBuildType(args.build_type)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+
+def AddDeviceOptions(parser):
+  """Adds device options to |parser|."""
+
+  parser = parser.add_argument_group('device arguments')
+
+  parser.add_argument(
+      '--adb-path',
+      type=os.path.realpath,
+      help='Specify the absolute path of the adb binary that '
+           'should be used.')
+  parser.add_argument('--denylist-file',
+                      type=os.path.realpath,
+                      help='Device denylist file.')
+  parser.add_argument(
+      '-d', '--device', nargs='+',
+      dest='test_devices',
+      help='Target device(s) for the test suite to run on.')
+  parser.add_argument(
+      '--enable-concurrent-adb',
+      action='store_true',
+      help='Run multiple adb commands at the same time, even '
+           'for the same device.')
+  parser.add_argument(
+      '--enable-device-cache',
+      action='store_true',
+      help='Cache device state to disk between runs')
+  parser.add_argument(
+      '--skip-clear-data',
+      action='store_true',
+      help='Do not wipe app data between tests. Use this to '
+           'speed up local development and never on bots '
+           '(increases flakiness).')
+  parser.add_argument(
+      '--recover-devices',
+      action='store_true',
+      help='Attempt to recover devices prior to the final retry. Warning: '
+           'this will cause all devices to reboot.')
+  parser.add_argument(
+      '--tool',
+      dest='tool',
+      help='Run the test under a tool '
+           '(use --tool help to list them)')
+
+  parser.add_argument(
+      '--upload-logcats-file',
+      action='store_true',
+      dest='upload_logcats_file',
+      help='Whether to upload logcat file to logdog.')
+
+  logcat_output_group = parser.add_mutually_exclusive_group()
+  logcat_output_group.add_argument(
+      '--logcat-output-dir', type=os.path.realpath,
+      help='If set, will dump logcats recorded during test run to directory. '
+           'File names will be the device ids with timestamps.')
+  logcat_output_group.add_argument(
+      '--logcat-output-file', type=os.path.realpath,
+      help='If set, will merge logcats recorded during test run and dump them '
+           'to the specified file.')
+
+
+def AddEmulatorOptions(parser):
+  """Adds emulator-specific options to |parser|."""
+  parser = parser.add_argument_group('emulator arguments')
+
+  parser.add_argument(
+      '--avd-config',
+      type=os.path.realpath,
+      help='Path to the avd config textpb. '
+      '(See //tools/android/avd/proto/ for message definition'
+      ' and existing textpb files.)')
+  parser.add_argument(
+      '--emulator-count',
+      type=int,
+      default=1,
+      help='Number of emulators to use.')
+  parser.add_argument(
+      '--emulator-window',
+      action='store_true',
+      default=False,
+      help='Enable graphical window display on the emulator.')
+
+
+def AddGTestOptions(parser):
+  """Adds gtest options to |parser|."""
+
+  parser = parser.add_argument_group('gtest arguments')
+
+  parser.add_argument(
+      '--app-data-file',
+      action='append', dest='app_data_files',
+      help='A file path relative to the app data directory '
+           'that should be saved to the host.')
+  parser.add_argument(
+      '--app-data-file-dir',
+      help='Host directory to which app data files will be'
+           ' saved. Used with --app-data-file.')
+  parser.add_argument(
+      '--delete-stale-data',
+      dest='delete_stale_data', action='store_true',
+      help='Delete stale test data on the device.')
+  parser.add_argument(
+      '--enable-xml-result-parsing',
+      action='store_true', help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--executable-dist-dir',
+      type=os.path.realpath,
+      help="Path to executable's dist directory for native"
+           " (non-apk) tests.")
+  parser.add_argument(
+      '--extract-test-list-from-filter',
+      action='store_true',
+      help='When a test filter is specified, and the list of '
+           'tests can be determined from it, skip querying the '
+           'device for the list of all tests. Speeds up local '
+           'development, but is not safe to use on bots '
+           '(http://crbug.com/549214).')
+  parser.add_argument(
+      '--gs-test-artifacts-bucket',
+      help=('If present, test artifacts will be uploaded to this Google '
+            'Storage bucket.'))
+  parser.add_argument(
+      '--render-test-output-dir',
+      help='If present, store rendering artifacts in this path.')
+  parser.add_argument(
+      '--runtime-deps-path',
+      dest='runtime_deps_path', type=os.path.realpath,
+      help='Runtime data dependency file from GN.')
+  parser.add_argument(
+      '-t', '--shard-timeout',
+      dest='shard_timeout', type=int, default=120,
+      help='Timeout to wait for each test (default: %(default)s).')
+  parser.add_argument(
+      '--store-tombstones',
+      dest='store_tombstones', action='store_true',
+      help='Add tombstones in results if crash.')
+  parser.add_argument(
+      '-s', '--suite',
+      dest='suite_name', nargs='+', metavar='SUITE_NAME', required=True,
+      help='Executable name of the test suite to run.')
+  parser.add_argument(
+      '--test-apk-incremental-install-json',
+      type=os.path.realpath,
+      help='Path to install json for the test apk.')
+  parser.add_argument('--test-launcher-batch-limit',
+                      dest='test_launcher_batch_limit',
+                      type=int,
+                      help='The max number of tests to run in a shard. '
+                      'Ignores non-positive ints and those greater than '
+                      'MAX_SHARDS')
+  parser.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_true',
+      help='Wait for java debugger to attach before running any application '
+           'code. Also disables test timeouts and sets retries=0.')
+  parser.add_argument(
+      '--coverage-dir',
+      type=os.path.realpath,
+      help='Directory in which to place all generated coverage files.')
+
+
+def AddInstrumentationTestOptions(parser):
+  """Adds Instrumentation test options to |parser|."""
+
+  parser = parser.add_argument_group('instrumentation arguments')
+
+  parser.add_argument(
+      '--additional-apk',
+      action='append', dest='additional_apks', default=[],
+      type=_RealPath,
+      help='Additional apk that must be installed on '
+           'the device when the tests are run')
+  parser.add_argument(
+      '-A', '--annotation',
+      dest='annotation_str',
+      help='Comma-separated list of annotations. Run only tests with any of '
+           'the given annotations. An annotation can be either a key or a '
+           'key-values pair. A test that has no annotation is considered '
+           '"SmallTest".')
+  # TODO(jbudorick): Remove support for name-style APK specification once
+  # bots are no longer doing it.
+  parser.add_argument(
+      '--apk-under-test',
+      help='Path or name of the apk under test.')
+  parser.add_argument(
+      '--module',
+      action='append',
+      dest='modules',
+      help='Specify Android App Bundle modules to install in addition to the '
+      'base module.')
+  parser.add_argument(
+      '--fake-module',
+      action='append',
+      dest='fake_modules',
+      help='Specify Android App Bundle modules to fake install in addition to '
+      'the real modules.')
+  parser.add_argument(
+      '--additional-locale',
+      action='append',
+      dest='additional_locales',
+      help='Specify locales in addition to the device locale to install splits '
+      'for when --apk-under-test is an Android App Bundle.')
+  parser.add_argument(
+      '--coverage-dir',
+      type=os.path.realpath,
+      help='Directory in which to place all generated '
+      'Jacoco coverage files.')
+  parser.add_argument(
+      '--delete-stale-data',
+      action='store_true', dest='delete_stale_data',
+      help='Delete stale test data on the device.')
+  parser.add_argument(
+      '--disable-dalvik-asserts',
+      dest='set_asserts', action='store_false', default=True,
+      help='Removes the dalvik.vm.enableassertions property')
+  parser.add_argument(
+      '--enable-java-deobfuscation',
+      action='store_true',
+      help='Deobfuscate java stack traces in test output and logcat.')
+  parser.add_argument(
+      '-E', '--exclude-annotation',
+      dest='exclude_annotation_str',
+      help='Comma-separated list of annotations. Exclude tests with these '
+           'annotations.')
+  def package_replacement(arg):
+    split_arg = arg.split(',')
+    if len(split_arg) != 2:
+      raise argparse.ArgumentTypeError(
+          'Expected two comma-separated strings for --replace-system-package, '
+          'received %d' % len(split_arg))
+    PackageReplacement = collections.namedtuple('PackageReplacement',
+                                                ['package', 'replacement_apk'])
+    return PackageReplacement(package=split_arg[0],
+                              replacement_apk=_RealPath(split_arg[1]))
+  parser.add_argument(
+      '--replace-system-package',
+      type=package_replacement, default=None,
+      help='Specifies a system package to replace with a given APK for the '
+           'duration of the test. Given as a comma-separated pair of strings, '
+           'the first element being the package and the second the path to the '
+           'replacement APK. Only supports replacing one package. Example: '
+           '--replace-system-package com.example.app,path/to/some.apk')
+  parser.add_argument(
+      '--remove-system-package',
+      default=[],
+      action='append',
+      dest='system_packages_to_remove',
+      help='Specifies a system package to remove before testing if it exists '
+      'on the system. WARNING: THIS WILL PERMANENTLY REMOVE THE SYSTEM APP. '
+      'Unlike --replace-system-package, the app will not be restored after '
+      'tests are finished.')
+
+  parser.add_argument(
+      '--use-webview-provider',
+      type=_RealPath, default=None,
+      help='Use this apk as the webview provider during test. '
+           'The original provider will be restored if possible, '
+           "on Nougat the provider can't be determined and so "
+           'the system will choose the default provider.')
+  parser.add_argument(
+      '--runtime-deps-path',
+      dest='runtime_deps_path', type=os.path.realpath,
+      help='Runtime data dependency file from GN.')
+  parser.add_argument(
+      '--screenshot-directory',
+      dest='screenshot_dir', type=os.path.realpath,
+      help='Capture screenshots of test failures')
+  parser.add_argument(
+      '--shared-prefs-file',
+      dest='shared_prefs_file', type=_RealPath,
+      help='The relative path to a file containing JSON list of shared '
+           'preference files to edit and how to do so. Example list: '
+           '[{'
+           '  "package": "com.package.example",'
+           '  "filename": "ExampleSettings.xml",'
+           '  "set": {'
+           '    "boolean_key_in_xml": true,'
+           '    "string_key_in_xml": "string_value"'
+           '  },'
+           '  "remove": ['
+           '    "key_in_xml_to_remove"'
+           '  ]'
+           '}]')
+  parser.add_argument(
+      '--store-tombstones',
+      action='store_true', dest='store_tombstones',
+      help='Add tombstones in results if crash.')
+  parser.add_argument(
+      '--strict-mode',
+      dest='strict_mode', default='testing',
+      help='StrictMode command-line flag set on the device, '
+           'death/testing to kill the process, off to stop '
+           'checking, flash to flash only. (default: %(default)s)')
+  parser.add_argument(
+      '--test-apk',
+      required=True,
+      help='Path or name of the apk containing the tests.')
+  parser.add_argument(
+      '--test-jar',
+      help='Path of jar containing test java files.')
+  parser.add_argument(
+      '--timeout-scale',
+      type=float,
+      help='Factor by which timeouts should be scaled.')
+  parser.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_true',
+      help='Wait for java debugger to attach before running any application '
+           'code. Also disables test timeouts and sets retries=0.')
+
+  # WPR record mode.
+  parser.add_argument('--wpr-enable-record',
+                      action='store_true',
+                      default=False,
+                      help='If true, the WPR server runs in record mode; '
+                      'otherwise it runs in replay mode.')
+
+  # These arguments are suppressed from the help text because they should
+  # only ever be specified by an intermediate script.
+  parser.add_argument(
+      '--apk-under-test-incremental-install-json',
+      help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--test-apk-incremental-install-json',
+      type=os.path.realpath,
+      help=argparse.SUPPRESS)
+
+
+def AddSkiaGoldTestOptions(parser):
+  """Adds Skia Gold test options to |parser|."""
+  parser = parser.add_argument_group("Skia Gold arguments")
+  parser.add_argument(
+      '--code-review-system',
+      help='A non-default code review system to pass to Gold, if '
+      'applicable.')
+  parser.add_argument(
+      '--git-revision', help='The git commit currently being tested.')
+  parser.add_argument(
+      '--gerrit-issue',
+      help='The Gerrit issue this test is being run on, if applicable.')
+  parser.add_argument(
+      '--gerrit-patchset',
+      help='The Gerrit patchset this test is being run on, if applicable.')
+  parser.add_argument(
+      '--buildbucket-id',
+      help='The Buildbucket build ID that this test was triggered from, if '
+      'applicable.')
+  local_group = parser.add_mutually_exclusive_group()
+  local_group.add_argument(
+      '--local-pixel-tests',
+      action='store_true',
+      default=None,
+      help='Specifies to run the Skia Gold pixel tests in local mode. When run '
+      'in local mode, uploading to Gold is disabled and traditional '
+      'generated/golden/diff images are output instead of triage links. '
+      'Running in local mode also implies --no-luci-auth. If both this '
+      'and --no-local-pixel-tests are left unset, the test harness will '
+      'attempt to detect whether it is running on a workstation or not '
+      'and set the options accordingly.')
+  local_group.add_argument(
+      '--no-local-pixel-tests',
+      action='store_false',
+      dest='local_pixel_tests',
+      help='Specifies to run the Skia Gold pixel tests in non-local (bot) '
+      'mode. When run in this mode, data is actually uploaded to Gold and '
+      'triage links are generated. If both this and --local-pixel-tests '
+      'are left unset, the test harness will attempt to detect whether '
+      'it is running on a workstation or not and set the options '
+      'accordingly.')
+  parser.add_argument(
+      '--no-luci-auth',
+      action='store_true',
+      default=False,
+      help="Don't use the serve account provided by LUCI for authentication "
+      'with Skia Gold, instead relying on gsutil to be pre-authenticated. '
+      'Meant for testing locally instead of on the bots.')
+  parser.add_argument(
+      '--bypass-skia-gold-functionality',
+      action='store_true',
+      default=False,
+      help='Bypass all interaction with Skia Gold, effectively disabling the '
+      'image comparison portion of any tests that use Gold. Only meant to be '
+      'used in case a Gold outage occurs and cannot be fixed quickly.')
+
+
+def AddJUnitTestOptions(parser):
+  """Adds junit test options to |parser|."""
+
+  parser = parser.add_argument_group('junit arguments')
+
+  parser.add_argument(
+      '--coverage-on-the-fly',
+      action='store_true',
+      help='Generate coverage data by Jacoco on-the-fly instrumentation.')
+  parser.add_argument(
+      '--coverage-dir', type=os.path.realpath,
+      help='Directory to store coverage info.')
+  parser.add_argument(
+      '--package-filter',
+      help='Filters tests by package.')
+  parser.add_argument(
+      '--runner-filter',
+      help='Filters tests by runner class. Must be fully qualified.')
+  parser.add_argument(
+      '--shards',
+      default=-1,
+      type=int,
+      help='Number of shards to run junit tests in parallel on. Only 1 shard '
+      'is supported when test-filter is specified. Values less than 1 '
+      'automatically select the number of shards.')
+  parser.add_argument(
+      '-s', '--test-suite', required=True,
+      help='JUnit test suite to run.')
+  debug_group = parser.add_mutually_exclusive_group()
+  debug_group.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_const', const='8701',
+      dest='debug_socket', help='Alias for --debug-socket=8701')
+  debug_group.add_argument(
+      '--debug-socket',
+      help='Wait for java debugger to attach at specified socket address '
+           'before running any application code. Also disables test timeouts '
+           'and sets retries=0.')
+
+  # These arguments are for Android Robolectric tests.
+  parser.add_argument(
+      '--robolectric-runtime-deps-dir',
+      help='Path to runtime deps for Robolectric.')
+  parser.add_argument(
+      '--resource-apk',
+      required=True,
+      help='Path to .ap_ containing binary resources for Robolectric.')
+
+
+def AddLinkerTestOptions(parser):
+  """Adds linker test options to |parser|."""
+
+  parser = parser.add_argument_group('linker arguments')
+
+  parser.add_argument(
+      '--test-apk',
+      type=os.path.realpath,
+      help='Path to the linker test APK.')
+
+
+def AddMonkeyTestOptions(parser):
+  """Adds monkey test options to |parser|."""
+
+  parser = parser.add_argument_group('monkey arguments')
+
+  parser.add_argument('--browser',
+                      required=True,
+                      choices=list(constants.PACKAGE_INFO.keys()),
+                      metavar='BROWSER',
+                      help='Browser under test.')
+  parser.add_argument(
+      '--category',
+      nargs='*', dest='categories', default=[],
+      help='A list of allowed categories. Monkey will only visit activities '
+           'that are listed with one of the specified categories.')
+  parser.add_argument(
+      '--event-count',
+      default=10000, type=int,
+      help='Number of events to generate (default: %(default)s).')
+  parser.add_argument(
+      '--seed',
+      type=int,
+      help='Seed value for pseudo-random generator. Same seed value generates '
+           'the same sequence of events. Seed is randomized by default.')
+  parser.add_argument(
+      '--throttle',
+      default=100, type=int,
+      help='Delay between events in ms (default: %(default)s).')
+
+
+def AddPythonTestOptions(parser):
+  """Adds python test options to |parser|."""
+
+  parser = parser.add_argument_group('python arguments')
+
+  parser.add_argument('-s',
+                      '--suite',
+                      dest='suite_name',
+                      metavar='SUITE_NAME',
+                      choices=list(constants.PYTHON_UNIT_TEST_SUITES.keys()),
+                      help='Name of the test suite to run.')
+
+
+def _CreateClassToFileNameDict(test_apk):
+  """Creates a dict mapping classes to file names from size-info apk."""
+  constants.CheckOutputDirectory()
+  test_apk_size_info = os.path.join(constants.GetOutDirectory(), 'size-info',
+                                    os.path.basename(test_apk) + '.jar.info')
+
+  class_to_file_dict = {}
+  # Some tests such as webview_cts_tests use a separately downloaded apk to run
+  # tests. This means the apk may not have been built by the system and hence
+  # no size info file exists.
+  if not os.path.exists(test_apk_size_info):
+    logging.debug('Apk size file not found. %s', test_apk_size_info)
+    return class_to_file_dict
+
+  with open(test_apk_size_info, 'r') as f:
+    for line in f:
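+      # Each line of the .jar.info file has the form "<class>,<path>", where
+      # in-tree source paths start with '../../' relative to the output dir.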
+      file_class, file_name = line.rstrip().split(',', 1)
+      # Only want files that are not prebuilt.
+      if file_name.startswith('../../'):
+        class_to_file_dict[file_class] = str(
+            file_name.replace('../../', '//', 1))
+
+  return class_to_file_dict
+
+
+def _RunPythonTests(args):
+  """Subcommand of RunTestsCommand which runs python unit tests."""
+  suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name]
+  suite_path = suite_vars['path']
+  suite_test_modules = suite_vars['test_modules']
+
+  sys.path = [suite_path] + sys.path
+  try:
+    suite = unittest.TestSuite()
+    suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m)
+                   for m in suite_test_modules)
+    runner = unittest.TextTestRunner(verbosity=1+args.verbose_count)
+    return 0 if runner.run(suite).wasSuccessful() else 1
+  finally:
+    sys.path = sys.path[1:]
+
+
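+# Test types that run in platform mode even without --enable-platform-mode.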
+_DEFAULT_PLATFORM_MODE_TESTS = [
+    'gtest', 'instrumentation', 'junit', 'linker', 'monkey'
+]
+
+
+def RunTestsCommand(args, result_sink_client=None):
+  """Checks test type and dispatches to the appropriate function.
+
+  Args:
+    args: argparse.Namespace object.
+    result_sink_client: A ResultSinkClient object.
+
+  Returns:
+    Integer indicating the exit code.
+
+  Raises:
+    Exception: Unknown command name passed in, or an exception from an
+        individual test runner.
+  """
+  command = args.command
+
+  ProcessCommonOptions(args)
+  logging.info('command: %s', ' '.join(sys.argv))
+  if args.enable_platform_mode or command in _DEFAULT_PLATFORM_MODE_TESTS:
+    return RunTestsInPlatformMode(args, result_sink_client)
+
+  if command == 'python':
+    return _RunPythonTests(args)
+  else:
+    raise Exception('Unknown test type.')
+
+
+_SUPPORTED_IN_PLATFORM_MODE = [
+  # TODO(jbudorick): Add support for more test types.
+  'gtest',
+  'instrumentation',
+  'junit',
+  'linker',
+  'monkey',
+]
+
+
+def RunTestsInPlatformMode(args, result_sink_client=None):
+  """Runs tests in platform mode and returns an exit code."""
+
+  def infra_error(message):
+    logging.fatal(message)
+    sys.exit(constants.INFRA_EXIT_CODE)
+
+  if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+    infra_error('%s is not yet supported in platform mode' % args.command)
+
+  ### Set up sigterm handler.
+
+  contexts_to_notify_on_sigterm = []
+  def unexpected_sigterm(_signum, _frame):
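+    # Log the stack of every live thread so unexpected terminations can be
+    # debugged post-mortem before exiting with an infra error.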
+    msg = [
+      'Received SIGTERM. Shutting down.',
+    ]
+    for live_thread in threading.enumerate():
+      # pylint: disable=protected-access
+      thread_stack = ''.join(traceback.format_stack(
+          sys._current_frames()[live_thread.ident]))
+      msg.extend([
+        'Thread "%s" (ident: %s) is currently running:' % (
+            live_thread.name, live_thread.ident),
+        thread_stack])
+
+    for context in contexts_to_notify_on_sigterm:
+      context.ReceivedSigterm()
+
+    infra_error('\n'.join(msg))
+
+  signal.signal(signal.SIGTERM, unexpected_sigterm)
+
+  ### Set up results handling.
+  # TODO(jbudorick): Rewrite results handling.
+
+  # all_raw_results is a list of lists of
+  # base_test_result.TestRunResults objects. Each instance of
+  # TestRunResults contains all test results produced by a single try,
+  # while each list of TestRunResults contains all tries in a single
+  # iteration.
+  all_raw_results = []
+
+  # all_iteration_results is a list of base_test_result.TestRunResults
+  # objects. Each instance of TestRunResults contains the last test
+  # result for each test run in that iteration.
+  all_iteration_results = []
+
+  global_results_tags = set()
+
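+  # The results JSON is staged in a named temp file (delete=False so the
+  # handle can be closed immediately); json_finalizer() below moves it to
+  # its final destination or removes it.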
+  json_file = tempfile.NamedTemporaryFile(delete=False)
+  json_file.close()
+
+  @contextlib.contextmanager
+  def json_finalizer():
+    try:
+      yield
+    finally:
+      if args.json_results_file and os.path.exists(json_file.name):
+        shutil.move(json_file.name, args.json_results_file)
+      elif args.isolated_script_test_output and os.path.exists(json_file.name):
+        shutil.move(json_file.name, args.isolated_script_test_output)
+      else:
+        os.remove(json_file.name)
+
+  @contextlib.contextmanager
+  def json_writer():
+    try:
+      yield
+    except Exception:
+      global_results_tags.add('UNRELIABLE_RESULTS')
+      raise
+    finally:
+      if args.isolated_script_test_output:
+        interrupted = 'UNRELIABLE_RESULTS' in global_results_tags
+        json_results.GenerateJsonTestResultFormatFile(all_raw_results,
+                                                      interrupted,
+                                                      json_file.name,
+                                                      indent=2)
+      else:
+        json_results.GenerateJsonResultsFile(
+            all_raw_results,
+            json_file.name,
+            global_tags=list(global_results_tags),
+            indent=2)
+
+      test_class_to_file_name_dict = {}
+      # Test Location is only supported for instrumentation tests as it
+      # requires the size-info file.
+      if test_instance.TestType() == 'instrumentation':
+        test_class_to_file_name_dict = _CreateClassToFileNameDict(args.test_apk)
+
+      if result_sink_client:
+        for run in all_raw_results:
+          for results in run:
+            for r in results.GetAll():
+              # Matches chrome.page_info.PageInfoViewTest#testChromePage
+              match = re.search(r'^(.+\..+)#', r.GetName())
+              test_file_name = test_class_to_file_name_dict.get(
+                  match.group(1)) if match else None
+              # Some tests put in non utf-8 char as part of the test
+              # which breaks uploads, so need to decode and re-encode.
+              result_sink_client.Post(
+                  r.GetName(), r.GetType(), r.GetDuration(),
+                  r.GetLog().decode('utf-8', 'replace').encode('utf-8'),
+                  test_file_name)
+
+  @contextlib.contextmanager
+  def upload_logcats_file():
+    try:
+      yield
+    finally:
+      if not args.logcat_output_file:
+        logging.critical('Cannot upload logcat file: no file specified.')
+      elif not os.path.exists(args.logcat_output_file):
+        logging.critical("Cannot upload logcat file: file doesn't exist.")
+      else:
+        with open(args.logcat_output_file) as src:
+          dst = logdog_helper.open_text('unified_logcats')
+          if dst:
+            shutil.copyfileobj(src, dst)
+            dst.close()
+            logging.critical(
+                'Logcat: %s', logdog_helper.get_viewer_url('unified_logcats'))
+
+
+  logcats_uploader = contextlib_ext.Optional(
+      upload_logcats_file(),
+      'upload_logcats_file' in args and args.upload_logcats_file)
+
+  ### Set up test objects.
+
+  out_manager = output_manager_factory.CreateOutputManager(args)
+  env = environment_factory.CreateEnvironment(
+      args, out_manager, infra_error)
+  test_instance = test_instance_factory.CreateTestInstance(args, infra_error)
+  test_run = test_run_factory.CreateTestRun(env, test_instance, infra_error)
+
+  contexts_to_notify_on_sigterm.append(env)
+  contexts_to_notify_on_sigterm.append(test_run)
+
+  ### Run.
+  with out_manager, json_finalizer():
+    with json_writer(), logcats_uploader, env, test_instance, test_run:
+
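+      # A negative --repeat runs indefinitely (until --break-on-failure
+      # stops it); otherwise the tests run args.repeat + 1 times.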
+      repetitions = (range(args.repeat +
+                           1) if args.repeat >= 0 else itertools.count())
+      result_counts = collections.defaultdict(
+          lambda: collections.defaultdict(int))
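+      # Maps test name -> result type -> count, aggregated across iterations
+      # for the summary logged after the final run.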
+      iteration_count = 0
+      for _ in repetitions:
+        # raw_results will be populated with base_test_result.TestRunResults by
+        # test_run.RunTests(). It is immediately added to all_raw_results so
+        # that in the event of an exception, all_raw_results will already have
+        # the up-to-date results and those can be written to disk.
+        raw_results = []
+        all_raw_results.append(raw_results)
+
+        test_run.RunTests(raw_results)
+        if not raw_results:
+          all_raw_results.pop()
+          continue
+
+        iteration_results = base_test_result.TestRunResults()
+        for r in reversed(raw_results):
+          iteration_results.AddTestRunResults(r)
+        all_iteration_results.append(iteration_results)
+        iteration_count += 1
+
+        for r in iteration_results.GetAll():
+          result_counts[r.GetName()][r.GetType()] += 1
+
+        report_results.LogFull(
+            results=iteration_results,
+            test_type=test_instance.TestType(),
+            test_package=test_run.TestPackage(),
+            annotation=getattr(args, 'annotations', None),
+            flakiness_server=getattr(args, 'flakiness_dashboard_server',
+                                     None))
+        if args.break_on_failure and not iteration_results.DidRunPass():
+          break
+
+      if iteration_count > 1:
+        # Display summary results; only show per-test counts for tests that
+        # did not pass in every run.
+        all_pass = 0
+        tot_tests = 0
+        for test_name in result_counts:
+          tot_tests += 1
+          if any(result_counts[test_name][x] for x in (
+              base_test_result.ResultType.FAIL,
+              base_test_result.ResultType.CRASH,
+              base_test_result.ResultType.TIMEOUT,
+              base_test_result.ResultType.UNKNOWN)):
+            logging.critical(
+                '%s: %s',
+                test_name,
+                ', '.join('%s %s' % (str(result_counts[test_name][i]), i)
+                          for i in base_test_result.ResultType.GetTypes()))
+          else:
+            all_pass += 1
+
+        logging.critical('%s of %s tests passed in all %s runs',
+                         str(all_pass),
+                         str(tot_tests),
+                         str(iteration_count))
+
+    if (args.local_output or not local_utils.IsOnSwarming()
+        ) and not args.isolated_script_test_output:
+      with out_manager.ArchivedTempfile(
+          'test_results_presentation.html',
+          'test_results_presentation',
+          output_manager.Datatype.HTML) as results_detail_file:
+        result_html_string, _, _ = test_results_presentation.result_details(
+            json_path=json_file.name,
+            test_name=args.command,
+            cs_base_url='http://cs.chromium.org',
+            local_output=True)
+        results_detail_file.write(result_html_string.encode('utf-8'))
+        results_detail_file.flush()
+      logging.critical('TEST RESULTS: %s', results_detail_file.Link())
+
+      ui_screenshots = test_results_presentation.ui_screenshot_set(
+          json_file.name)
+      if ui_screenshots:
+        with out_manager.ArchivedTempfile(
+            'ui_screenshots.json',
+            'ui_capture',
+            output_manager.Datatype.JSON) as ui_screenshot_file:
+          ui_screenshot_file.write(ui_screenshots)
+        logging.critical('UI Screenshots: %s', ui_screenshot_file.Link())
+
+  return (0 if all(r.DidRunPass() for r in all_iteration_results)
+          else constants.ERROR_EXIT_CODE)
+
+
+def DumpThreadStacks(_signal, _frame):
+  for thread in threading.enumerate():
+    reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+  signal.signal(signal.SIGUSR1, DumpThreadStacks)
+
+  parser = argparse.ArgumentParser()
+  command_parsers = parser.add_subparsers(
+      title='test types', dest='command')
+
+  subp = command_parsers.add_parser(
+      'gtest',
+      help='googletest-based C++ tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddGTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'instrumentation',
+      help='InstrumentationTestCase-based Java tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddInstrumentationTestOptions(subp)
+  AddSkiaGoldTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'junit',
+      help='JUnit4-based Java tests')
+  AddCommonOptions(subp)
+  AddJUnitTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'linker',
+      help='linker tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddLinkerTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'monkey',
+      help="tests based on Android's monkey command")
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddMonkeyTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'python',
+      help='python tests based on unittest.TestCase')
+  AddCommonOptions(subp)
+  AddPythonTestOptions(subp)
+
+  args, unknown_args = parser.parse_known_args()
+  if unknown_args:
+    if hasattr(args, 'allow_unknown') and args.allow_unknown:
+      args.command_line_flags = unknown_args
+    else:
+      parser.error('unrecognized arguments: %s' % ' '.join(unknown_args))
+
+  # --replace-system-package/--remove-system-package has the potential to cause
+  # issues if --enable-concurrent-adb is set, so disallow that combination.
+  concurrent_adb_enabled = (hasattr(args, 'enable_concurrent_adb')
+                            and args.enable_concurrent_adb)
+  replacing_system_packages = (hasattr(args, 'replace_system_package')
+                               and args.replace_system_package)
+  removing_system_packages = (hasattr(args, 'system_packages_to_remove')
+                              and args.system_packages_to_remove)
+  if (concurrent_adb_enabled
+      and (replacing_system_packages or removing_system_packages)):
+    parser.error('--enable-concurrent-adb cannot be used with either '
+                 '--replace-system-package or --remove-system-package')
+
+  # --use-webview-provider has the potential to cause issues if
+  # --enable-concurrent-adb is set, so disallow that combination
+  if (hasattr(args, 'use_webview_provider') and
+      hasattr(args, 'enable_concurrent_adb') and args.use_webview_provider and
+      args.enable_concurrent_adb):
+    parser.error('--use-webview-provider and --enable-concurrent-adb cannot '
+                 'be used together')
+
+  if (getattr(args, 'coverage_on_the_fly', False)
+      and not getattr(args, 'coverage_dir', '')):
+    parser.error('--coverage-on-the-fly requires --coverage-dir')
+
+  if (getattr(args, 'debug_socket', None) or
+      (hasattr(args, 'wait_for_java_debugger') and
+       args.wait_for_java_debugger)):
+    args.num_retries = 0
+
+  # Result-sink may not exist in the environment if rdb stream is not enabled.
+  result_sink_client = result_sink.TryInitClient()
+
+  try:
+    return RunTestsCommand(args, result_sink_client)
+  except base_error.BaseError as e:
+    logging.exception('Error occurred.')
+    if e.is_infra_error:
+      return constants.INFRA_EXIT_CODE
+    return constants.ERROR_EXIT_CODE
+  except: # pylint: disable=W0702
+    logging.exception('Unrecognized error occurred.')
+    return constants.ERROR_EXIT_CODE
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/test_runner.pydeps b/src/build/android/test_runner.pydeps
new file mode 100644
index 0000000..660f8f8
--- /dev/null
+++ b/src/build/android/test_runner.pydeps
@@ -0,0 +1,226 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/test_runner.pydeps build/android/test_runner.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_proto_classes.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_time.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/encoder.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/atexit_with_log.py
+../../third_party/catapult/common/py_utils/py_utils/binary_manager.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/modules_util.py
+../../third_party/catapult/common/py_utils/py_utils/retry_util.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/common/py_utils/py_utils/ts_proxy_server.py
+../../third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/battery_utils.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/crash_handler.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_list.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/forwarder.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/ports.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/bundletool.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/device_recovery.py
+../../third_party/catapult/devil/devil/android/tools/device_status.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/android/tools/system_app.py
+../../third_party/catapult/devil/devil/android/tools/webview_app.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/__init__.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/base_tool.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/file_utils.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/colorama/src/colorama/__init__.py
+../../third_party/colorama/src/colorama/ansi.py
+../../third_party/colorama/src/colorama/ansitowin32.py
+../../third_party/colorama/src/colorama/initialise.py
+../../third_party/colorama/src/colorama/win32.py
+../../third_party/colorama/src/colorama/winterm.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_compat.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../../tools/swarming_client/libs/__init__.py
+../../tools/swarming_client/libs/logdog/__init__.py
+../../tools/swarming_client/libs/logdog/bootstrap.py
+../../tools/swarming_client/libs/logdog/stream.py
+../../tools/swarming_client/libs/logdog/streamname.py
+../../tools/swarming_client/libs/logdog/varint.py
+../gn_helpers.py
+../print_python_deps.py
+../skia_gold_common/__init__.py
+../skia_gold_common/skia_gold_properties.py
+../skia_gold_common/skia_gold_session.py
+../skia_gold_common/skia_gold_session_manager.py
+../util/lib/common/chrome_test_server_spawner.py
+../util/lib/common/unittest_util.py
+convert_dex_profile.py
+devil_chromium.py
+gyp/dex.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+gyp/util/zipalign.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/base/__init__.py
+pylib/base/base_test_result.py
+pylib/base/environment.py
+pylib/base/environment_factory.py
+pylib/base/output_manager.py
+pylib/base/output_manager_factory.py
+pylib/base/result_sink.py
+pylib/base/test_collection.py
+pylib/base/test_exception.py
+pylib/base/test_instance.py
+pylib/base/test_instance_factory.py
+pylib/base/test_run.py
+pylib/base/test_run_factory.py
+pylib/base/test_server.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/gtest/__init__.py
+pylib/gtest/gtest_test_instance.py
+pylib/instrumentation/__init__.py
+pylib/instrumentation/instrumentation_parser.py
+pylib/instrumentation/instrumentation_test_instance.py
+pylib/instrumentation/test_result.py
+pylib/junit/__init__.py
+pylib/junit/junit_test_instance.py
+pylib/local/__init__.py
+pylib/local/device/__init__.py
+pylib/local/device/local_device_environment.py
+pylib/local/device/local_device_gtest_run.py
+pylib/local/device/local_device_instrumentation_test_run.py
+pylib/local/device/local_device_monkey_test_run.py
+pylib/local/device/local_device_test_run.py
+pylib/local/emulator/__init__.py
+pylib/local/emulator/avd.py
+pylib/local/emulator/ini.py
+pylib/local/emulator/local_emulator_environment.py
+pylib/local/emulator/proto/__init__.py
+pylib/local/emulator/proto/avd_pb2.py
+pylib/local/local_test_server_spawner.py
+pylib/local/machine/__init__.py
+pylib/local/machine/local_machine_environment.py
+pylib/local/machine/local_machine_junit_test_run.py
+pylib/monkey/__init__.py
+pylib/monkey/monkey_test_instance.py
+pylib/output/__init__.py
+pylib/output/local_output_manager.py
+pylib/output/noop_output_manager.py
+pylib/output/remote_output_manager.py
+pylib/results/__init__.py
+pylib/results/flakiness_dashboard/__init__.py
+pylib/results/flakiness_dashboard/json_results_generator.py
+pylib/results/flakiness_dashboard/results_uploader.py
+pylib/results/json_results.py
+pylib/results/presentation/__init__.py
+pylib/results/presentation/standard_gtest_merge.py
+pylib/results/presentation/test_results_presentation.py
+pylib/results/report_results.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/symbols/stack_symbolizer.py
+pylib/utils/__init__.py
+pylib/utils/chrome_proxy_utils.py
+pylib/utils/decorators.py
+pylib/utils/device_dependencies.py
+pylib/utils/dexdump.py
+pylib/utils/gold_utils.py
+pylib/utils/google_storage_helper.py
+pylib/utils/instrumentation_tracing.py
+pylib/utils/local_utils.py
+pylib/utils/logdog_helper.py
+pylib/utils/logging_utils.py
+pylib/utils/proguard.py
+pylib/utils/repo_utils.py
+pylib/utils/shared_preference_utils.py
+pylib/utils/test_filter.py
+pylib/utils/time_profile.py
+pylib/valgrind_tools.py
+test_runner.py
+tombstones.py
diff --git a/src/build/android/test_wrapper/logdog_wrapper.py b/src/build/android/test_wrapper/logdog_wrapper.py
new file mode 100755
index 0000000..782d5d8
--- /dev/null
+++ b/src/build/android/test_wrapper/logdog_wrapper.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for adding logdog streaming support to swarming tasks."""
+
+import argparse
+import contextlib
+import logging
+import os
+import signal
+import subprocess
+import sys
+
+_SRC_PATH = os.path.abspath(os.path.join(
+    os.path.dirname(__file__), '..', '..', '..'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'common',
+                             'py_utils'))
+
+from devil.utils import signal_handler
+from devil.utils import timeout_retry
+from py_utils import tempfile_ext
+
+PROJECT = 'chromium'
+OUTPUT = 'logdog'
+COORDINATOR_HOST = 'luci-logdog.appspot.com'
+SERVICE_ACCOUNT_JSON = ('/creds/service_accounts'
+                        '/service-account-luci-logdog-publisher.json')
+LOGDOG_TERMINATION_TIMEOUT = 30
+
+
+def CommandParser():
+  """Returns the argument parser for this wrapper."""
+  parser = argparse.ArgumentParser()
+  wrapped = parser.add_mutually_exclusive_group()
+  wrapped.add_argument(
+      '--target',
+      help='The test target to be run. If neither target nor script is set,'
+      ' any extra args passed to this script are assumed to be the'
+      ' full test command to run.')
+  wrapped.add_argument(
+      '--script',
+      help='The script target to be run. If neither target nor script is set,'
+      ' any extra args passed to this script are assumed to be the'
+      ' full test command to run.')
+  parser.add_argument('--logdog-bin-cmd', required=True,
+                      help='The logdog bin cmd.')
+  return parser
+
+
+def CreateStopTestsMethod(proc):
+  def StopTests(signum, _frame):
+    logging.error('Forwarding signal %s to test process', str(signum))
+    proc.send_signal(signum)
+  return StopTests
+
+
+@contextlib.contextmanager
+def NoLeakingProcesses(popen):
+  try:
+    yield popen
+  finally:
+    if popen is not None:
+      try:
+        if popen.poll() is None:
+          popen.kill()
+      except OSError:
+        logging.warning('Failed to kill %s. Process may be leaked.',
+                        str(popen.pid))
+
+
+def main():
+  parser = CommandParser()
+  args, extra_cmd_args = parser.parse_known_args(sys.argv[1:])
+
+  logging.basicConfig(level=logging.INFO)
+  if args.target:
+    test_cmd = [os.path.join('bin', 'run_%s' % args.target), '-v']
+    test_cmd += extra_cmd_args
+  elif args.script:
+    test_cmd = [args.script]
+    test_cmd += extra_cmd_args
+  else:
+    test_cmd = extra_cmd_args
+
+  test_env = dict(os.environ)
+  logdog_cmd = []
+
+  with tempfile_ext.NamedTemporaryDirectory(
+      prefix='tmp_android_logdog_wrapper') as temp_directory:
+    if not os.path.exists(args.logdog_bin_cmd):
+      logging.error(
+          'Logdog binary %s unavailable. Unable to create logdog client',
+          args.logdog_bin_cmd)
+    else:
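+      # The butler serves streams over a unix domain socket in the temp
+      # directory. Note: this assumes SWARMING_TASK_ID is set, as it is on
+      # swarming bots; os.path.join would fail if it were missing.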
+      streamserver_uri = 'unix:%s' % os.path.join(temp_directory,
+                                                  'butler.sock')
+      prefix = os.path.join('android', 'swarming', 'logcats',
+                            os.environ.get('SWARMING_TASK_ID'))
+
+      logdog_cmd = [
+          args.logdog_bin_cmd,
+          '-project', PROJECT,
+          '-output', OUTPUT,
+          '-prefix', prefix,
+          '--service-account-json', SERVICE_ACCOUNT_JSON,
+          '-coordinator-host', COORDINATOR_HOST,
+          'serve',
+          '-streamserver-uri', streamserver_uri]
+      test_env.update({
+          'LOGDOG_STREAM_PROJECT': PROJECT,
+          'LOGDOG_STREAM_PREFIX': prefix,
+          'LOGDOG_STREAM_SERVER_PATH': streamserver_uri,
+          'LOGDOG_COORDINATOR_HOST': COORDINATOR_HOST,
+      })
+
+    logdog_proc = None
+    if logdog_cmd:
+      logdog_proc = subprocess.Popen(logdog_cmd)
+
+    with NoLeakingProcesses(logdog_proc):
+      with NoLeakingProcesses(
+          subprocess.Popen(test_cmd, env=test_env)) as test_proc:
+        with signal_handler.SignalHandler(signal.SIGTERM,
+                                          CreateStopTestsMethod(test_proc)):
+          result = test_proc.wait()
+          if logdog_proc:
+            def logdog_stopped():
+              return logdog_proc.poll() is not None
+
+            logdog_proc.terminate()
+            timeout_retry.WaitFor(logdog_stopped, wait_period=1,
+                                  max_tries=LOGDOG_TERMINATION_TIMEOUT)
+
+            # If logdog_proc hasn't finished by this point, allow
+            # NoLeakingProcesses to kill it.
+
+  return result
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/test_wrapper/logdog_wrapper.pydeps b/src/build/android/test_wrapper/logdog_wrapper.pydeps
new file mode 100644
index 0000000..0e8d039
--- /dev/null
+++ b/src/build/android/test_wrapper/logdog_wrapper.pydeps
@@ -0,0 +1,12 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/test_wrapper --output build/android/test_wrapper/logdog_wrapper.pydeps build/android/test_wrapper/logdog_wrapper.py
+../../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/base_error.py
+../../../third_party/catapult/devil/devil/utils/__init__.py
+../../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+logdog_wrapper.py
diff --git a/src/build/android/tests/symbolize/Makefile b/src/build/android/tests/symbolize/Makefile
new file mode 100644
index 0000000..4fc53da
--- /dev/null
+++ b/src/build/android/tests/symbolize/Makefile
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOOLCHAIN=../../../../third_party/android_ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-
+CXX=$(TOOLCHAIN)g++
+
+lib%.so: %.cc
+	$(CXX) -nostdlib -g -fPIC -shared $< -o $@
+
+all: liba.so libb.so
diff --git a/src/build/android/tests/symbolize/a.cc b/src/build/android/tests/symbolize/a.cc
new file mode 100644
index 0000000..f0c7ca4
--- /dev/null
+++ b/src/build/android/tests/symbolize/a.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class A {
+ public:
+  A();
+  void Foo(int i);
+  void Bar(const char* c);
+};
+
+A::A() {}
+void A::Foo(int i) {}
+void A::Bar(const char* c) {}
diff --git a/src/build/android/tests/symbolize/b.cc b/src/build/android/tests/symbolize/b.cc
new file mode 100644
index 0000000..db87520
--- /dev/null
+++ b/src/build/android/tests/symbolize/b.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B {
+ public:
+  B();
+  void Baz(float f);
+  void Qux(double d);
+};
+
+B::B() {}
+void B::Baz(float f) {}
+void B::Qux(double d) {}
diff --git a/src/build/android/tests/symbolize/liba.so b/src/build/android/tests/symbolize/liba.so
new file mode 100644
index 0000000..79cb739
--- /dev/null
+++ b/src/build/android/tests/symbolize/liba.so
Binary files differ
diff --git a/src/build/android/tests/symbolize/libb.so b/src/build/android/tests/symbolize/libb.so
new file mode 100644
index 0000000..7cf01d4
--- /dev/null
+++ b/src/build/android/tests/symbolize/libb.so
Binary files differ
diff --git a/src/build/android/tombstones.py b/src/build/android/tombstones.py
new file mode 100755
index 0000000..082e7c1
--- /dev/null
+++ b/src/build/android/tombstones.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Find the most recent tombstone file(s) on all connected devices
+# and prints their stacks.
+#
+# Assumes tombstone file was created with current symbols.
+
+import argparse
+import datetime
+import logging
+import os
+import sys
+
+from multiprocessing.pool import ThreadPool
+
+import devil_chromium
+
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.symbols import stack_symbolizer
+
+
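+# Run `date` with TZ=UTC so its output parses consistently regardless of the
+# device's configured timezone.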
+_TZ_UTC = {'TZ': 'UTC'}
+
+
+def _ListTombstones(device):
+  """List the tombstone files on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Yields:
+    Tuples of (tombstone filename, date time of file on device).
+  """
+  try:
+    if not device.PathExists('/data/tombstones', as_root=True):
+      return
+    entries = device.StatDirectory('/data/tombstones', as_root=True)
+    for entry in entries:
+      if 'tombstone' in entry['filename']:
+        yield (entry['filename'],
+               datetime.datetime.fromtimestamp(entry['st_mtime']))
+  except device_errors.CommandFailedError:
+    logging.exception('Could not retrieve tombstones.')
+  except device_errors.DeviceUnreachableError:
+    logging.exception('Device unreachable retrieving tombstones.')
+  except device_errors.CommandTimeoutError:
+    logging.exception('Timed out retrieving tombstones.')
+
+
+def _GetDeviceDateTime(device):
+  """Determine the date time on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Returns:
+    A datetime instance.
+  """
+  device_now_string = device.RunShellCommand(
+      ['date'], check_return=True, env=_TZ_UTC)
+  return datetime.datetime.strptime(
+      device_now_string[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+  """Retrieve the tombstone data from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: The tombstone file to retrieve.
+
+  Returns:
+    A list of lines.
+  """
+  return device.ReadFile(
+      '/data/tombstones/' + tombstone_file, as_root=True).splitlines()
+
+
+def _EraseTombstone(device, tombstone_file):
+  """Deletes a tombstone from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to delete.
+  """
+  return device.RunShellCommand(
+      ['rm', '/data/tombstones/' + tombstone_file],
+      as_root=True, check_return=True)
+
+
+def _ResolveTombstone(args):
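+  # |args| is a (tombstone, symbolizer) pair packed into a single argument so
+  # that this function can be used with ThreadPool.map.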
+  tombstone = args[0]
+  tombstone_symbolizer = args[1]
+  lines = []
+  lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+            ', about this long ago: ' +
+            (str(tombstone['device_now'] - tombstone['time']) +
+            ' Device: ' + tombstone['serial'])]
+  logging.info('\n'.join(lines))
+  logging.info('Resolving...')
+  lines += tombstone_symbolizer.ExtractAndResolveNativeStackTraces(
+      tombstone['data'],
+      tombstone['device_abi'],
+      tombstone['stack'])
+  return lines
+
+
+def _ResolveTombstones(jobs, tombstones, tombstone_symbolizer):
+  """Resolve a list of tombstones.
+
+  Args:
+    jobs: The number of jobs to use with multithread.
+    tombstones: A list of tombstones.
+    tombstone_symbolizer: The symbolizer used to resolve the native stacks.
+
+  Returns:
+    A list of resolved tombstone lines.
+  """
+  if not tombstones:
+    logging.warning('No tombstones to resolve.')
+    return []
+  if len(tombstones) == 1:
+    data = [_ResolveTombstone([tombstones[0], tombstone_symbolizer])]
+  else:
+    pool = ThreadPool(jobs)
+    data = pool.map(
+        _ResolveTombstone,
+        [[tombstone, tombstone_symbolizer] for tombstone in tombstones])
+    pool.close()
+    pool.join()
+  resolved_tombstones = []
+  for tombstone in data:
+    resolved_tombstones.extend(tombstone)
+  return resolved_tombstones
+
+
+def _GetTombstonesForDevice(device, resolve_all_tombstones,
+                            include_stack_symbols,
+                            wipe_tombstones):
+  """Returns a list of tombstones on a given device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    resolve_all_tombstones: Whether to resolve every tombstone.
+    include_stack_symbols: Whether to include symbols for stack data.
+    wipe_tombstones: Whether to wipe tombstones.
+  """
+  ret = []
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones.')
+    return ret
+
+  # Sort the tombstones in date order, descending
+  all_tombstones.sort(key=lambda entry: entry[1], reverse=True)
+
+  # Only resolve the most recent unless --all-tombstones given.
+  tombstones = all_tombstones if resolve_all_tombstones else [all_tombstones[0]]
+
+  device_now = _GetDeviceDateTime(device)
+  try:
+    for tombstone_file, tombstone_time in tombstones:
+      ret += [{'serial': str(device),
+               'device_abi': device.product_cpu_abi,
+               'device_now': device_now,
+               'time': tombstone_time,
+               'file': tombstone_file,
+               'stack': include_stack_symbols,
+               'data': _GetTombstoneData(device, tombstone_file)}]
+  except device_errors.CommandFailedError:
+    for entry in device.StatDirectory(
+        '/data/tombstones', as_root=True, timeout=60):
+      logging.info('%s: %s', str(device), entry)
+    raise
+
+  # Erase all the tombstones if desired.
+  if wipe_tombstones:
+    for tombstone_file, _ in all_tombstones:
+      _EraseTombstone(device, tombstone_file)
+
+  return ret
+
+
+def ClearAllTombstones(device):
+  """Clear all tombstones in the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+  """
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones to clear.')
+
+  for tombstone_file, _ in all_tombstones:
+    _EraseTombstone(device, tombstone_file)
+
+
+def ResolveTombstones(device, resolve_all_tombstones, include_stack_symbols,
+                      wipe_tombstones, jobs=4, apk_under_test=None,
+                      tombstone_symbolizer=None):
+  """Resolve tombstones in the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    resolve_all_tombstones: Whether to resolve every tombstone.
+    include_stack_symbols: Whether to include symbols for stack data.
+    wipe_tombstones: Whether to wipe tombstones.
+    jobs: Number of jobs to use when processing multiple crash stacks.
+    apk_under_test: The APK from which the symbolizer loads symbols, if no
+        symbolizer is supplied.
+    tombstone_symbolizer: An existing symbolizer to reuse, if any.
+
+  Returns:
+    A list of resolved tombstones.
+  """
+  return _ResolveTombstones(jobs,
+                            _GetTombstonesForDevice(device,
+                                                    resolve_all_tombstones,
+                                                    include_stack_symbols,
+                                                    wipe_tombstones),
+                            (tombstone_symbolizer
+                             or stack_symbolizer.Symbolizer(apk_under_test)))
+
+
+def main():
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
+  logging.getLogger().setLevel(logging.INFO)
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--device',
+                      help='The serial number of the device. If not '
+                           'specified, all devices will be used.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument('-a', '--all-tombstones', action='store_true',
+                      help='Resolve symbols for all tombstones, rather than '
+                           'just the most recent.')
+  parser.add_argument('-s', '--stack', action='store_true',
+                      help='Also include symbols for stack data.')
+  parser.add_argument('-w', '--wipe-tombstones', action='store_true',
+                      help='Erase all tombstones from device after processing')
+  parser.add_argument('-j', '--jobs', type=int,
+                      default=4,
+                      help='Number of jobs to use when processing multiple '
+                           'crash stacks.')
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('--adb-path', type=os.path.abspath,
+                      help='Path to the adb binary.')
+  args = parser.parse_args()
+
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory(),
+                            adb_path=args.adb_path)
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+
+  if args.device:
+    devices = [device_utils.DeviceUtils(args.device)]
+  else:
+    devices = device_utils.DeviceUtils.HealthyDevices(denylist)
+
+  # This must be done serially because strptime can hit a race condition if
+  # used for the first time in a multithreaded environment.
+  # http://bugs.python.org/issue7980
+  for device in devices:
+    resolved_tombstones = ResolveTombstones(
+        device, args.all_tombstones,
+        args.stack, args.wipe_tombstones, args.jobs)
+    for line in resolved_tombstones:
+      logging.info(line)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/update_deps/update_third_party_deps.py b/src/build/android/update_deps/update_third_party_deps.py
new file mode 100755
index 0000000..3a869c4
--- /dev/null
+++ b/src/build/android/update_deps/update_third_party_deps.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Uploads or downloads third party libraries to or from Google Cloud Storage.
+
+This script will only work for Android checkouts.
+"""
+
+import argparse
+import logging
+import os
+import sys
+
+
+sys.path.append(os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(
+    os.path.abspath(
+        os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools')))
+import download_from_google_storage
+import upload_to_google_storage
+
+
+def _AddBasicArguments(parser):
+  parser.add_argument(
+      '--sdk-root', default=constants.ANDROID_SDK_ROOT,
+      help='base path to the Android SDK root')
+  parser.add_argument(
+      '-v', '--verbose', action='store_true', help='print debug information')
+  parser.add_argument(
+      '-b', '--bucket-path', required=True,
+      help='The path of the lib file in Google Cloud Storage.')
+  parser.add_argument(
+      '-l', '--local-path', required=True,
+      help='The base path of the third_party directory')
+
+
+def _CheckPaths(bucket_path, local_path):
+  if bucket_path.startswith('gs://'):
+    bucket_url = bucket_path
+  else:
+    bucket_url = 'gs://%s' % bucket_path
+  local_path = os.path.join(host_paths.DIR_SOURCE_ROOT, local_path)
+  if not os.path.isdir(local_path):
+    raise IOError(
+        'The library local path is not a valid directory: %s' % local_path)
+  return bucket_url, local_path
+
+
+def _CheckFileList(local_path, file_list):
+  local_path = os.path.abspath(local_path)
+  abs_path_list = [os.path.abspath(f) for f in file_list]
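+  # Reject anything that does not live under |local_path|.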
+  for f in abs_path_list:
+    if os.path.commonprefix([f, local_path]) != local_path:
+      raise IOError(
+          '%s in the arguments is not descendant of the specified directory %s'
+          % (f, local_path))
+  return abs_path_list
+
+
+def _PurgeSymlinks(local_path):
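+  # Drop any symlinks under |local_path| first so downloaded files replace
+  # the links instead of being written through them.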
+  for dirpath, _, filenames in os.walk(local_path):
+    for f in filenames:
+      path = os.path.join(dirpath, f)
+      if os.path.islink(path):
+        os.remove(path)
+
+
+def Upload(arguments):
+  """Upload files in a third_party directory to Google Cloud Storage."""
+  bucket_url, local_path = _CheckPaths(arguments.bucket_path,
+                                       arguments.local_path)
+  file_list = _CheckFileList(local_path, arguments.file_list)
+  return upload_to_google_storage.upload_to_google_storage(
+      input_filenames=file_list,
+      base_url=bucket_url,
+      gsutil=arguments.gsutil,
+      force=False,
+      use_md5=False,
+      num_threads=1,
+      skip_hashing=False,
+      gzip=None)
+
+
+def Download(arguments):
+  """Download files based on .sha1 files in a third_party dir from GCS."""
+  bucket_url, local_path = _CheckPaths(arguments.bucket_path,
+                                       arguments.local_path)
+  _PurgeSymlinks(local_path)
+  return download_from_google_storage.download_from_google_storage(
+      local_path,
+      bucket_url,
+      gsutil=arguments.gsutil,
+      num_threads=1,
+      directory=True,
+      recursive=True,
+      force=False,
+      output=None,
+      ignore_errors=False,
+      sha1_file=None,
+      verbose=arguments.verbose,
+      auto_platform=False,
+      extract=False)
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  subparsers = parser.add_subparsers(title='commands')
+  download_parser = subparsers.add_parser(
+      'download', help='download the library from the cloud storage')
+  _AddBasicArguments(download_parser)
+  download_parser.set_defaults(func=Download)
+
+  upload_parser = subparsers.add_parser(
+      'upload', help='find all jar files in a third_party directory and ' +
+                     'upload them to cloud storage')
+  _AddBasicArguments(upload_parser)
+  upload_parser.set_defaults(func=Upload)
+  upload_parser.add_argument(
+      '-f', '--file-list', nargs='+', required=True,
+      help='A list of base paths for files in third_party to upload.')
+
+  arguments = parser.parse_args(argv)
+  if not os.path.isdir(arguments.sdk_root):
+    logging.debug('Did not find the Android SDK root directory at "%s".',
+                  arguments.sdk_root)
+    logging.info('Skipping, not on an android checkout.')
+    return 0
+
+  arguments.gsutil = download_from_google_storage.Gsutil(
+      download_from_google_storage.GSUTIL_DEFAULT_PATH)
+  return arguments.func(arguments)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/update_verification.py b/src/build/android/update_verification.py
new file mode 100755
index 0000000..3d478f4
--- /dev/null
+++ b/src/build/android/update_verification.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs semi-automated update testing on a non-rooted device.
+
+This script will help verify that app data is preserved during an update.
+To use this script first run it with the create_app_data option.
+
+./update_verification.py create_app_data --old-apk <path> --app-data <path>
+
+The script will then install the old apk, prompt you to create some app data
+(bookmarks, etc.), and then save the app data in the path you gave it.
+
+Next, once you have some app data saved, run this script with the test_update
+option.
+
+./update_verification.py test_update --old-apk <path> --new-apk <path>
+--app-data <path>
+
+This will install the old apk, load the saved app data, install the new apk,
+and ask the user to verify that all of the app data was preserved.
+"""
+
+import argparse
+import logging
+import sys
+
+import devil_chromium
+
+from devil.android import apk_helper
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+
+def CreateAppData(device, old_apk, app_data, package_name):
+  device.Install(old_apk)
+  raw_input('Set the application state. Once ready, press enter and '
+            'select "Backup my data" on the device.')
+  device.adb.Backup(app_data, packages=[package_name])
+  logging.critical('Application data saved to %s', app_data)
+
+def TestUpdate(device, old_apk, new_apk, app_data, package_name):
+  device.Install(old_apk)
+  device.adb.Restore(app_data)
+  # The restore command is not synchronous, so wait for the user to confirm.
+  raw_input('Select "Restore my data" on the device. Then press enter to '
+            'continue.')
+  if not device.IsApplicationInstalled(package_name):
+    raise Exception('Expected package %s to already be installed. '
+                    'Package name might have changed!' % package_name)
+
+  logging.info('Verifying that %s can be overinstalled.', new_apk)
+  device.adb.Install(new_apk, reinstall=True)
+  logging.critical('Successfully updated to the new apk. Please verify that '
+                   'the application data is preserved.')
+
+def main():
+  parser = argparse.ArgumentParser(
+      description="Script to do semi-automated upgrade testing.")
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Print verbose log information.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  command_parsers = parser.add_subparsers(dest='command')
+
+  subparser = command_parsers.add_parser('create_app_data')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup should be '
+                           'saved to.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  subparser = command_parsers.add_parser('test_update')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--new-apk', required=True,
+                         help='Path to apk to update to.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup is saved.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  args = parser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  devil_chromium.Initialize()
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+
+  devices = device_utils.DeviceUtils.HealthyDevices(denylist)
+  if not devices:
+    raise device_errors.NoDevicesError()
+  device = devices[0]
+  logging.info('Using device %s for testing.', str(device))
+
+  package_name = (args.package_name if args.package_name
+                  else apk_helper.GetPackageName(args.old_apk))
+  if args.command == 'create_app_data':
+    CreateAppData(device, args.old_apk, args.app_data, package_name)
+  elif args.command == 'test_update':
+    TestUpdate(
+        device, args.old_apk, args.new_apk, args.app_data, package_name)
+  else:
+    raise Exception('Unknown test command: %s' % args.command)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/video_recorder.py b/src/build/android/video_recorder.py
new file mode 100755
index 0000000..6c54e7a
--- /dev/null
+++ b/src/build/android/video_recorder.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import video_recorder
+
+if __name__ == '__main__':
+  devil_chromium.Initialize()
+  sys.exit(video_recorder.main())